per_node_diag = {'ns_1@127.0.0.1', [{version, [{public_key,"0.13"}, {lhttpc,"1.3.0"}, {ale,"8cffe61"}, {os_mon,"2.2.7"}, {couch_set_view,"1.2.0a-00105ea-git"}, {mnesia,"4.5"}, {inets,"5.7.1"}, {couch,"1.2.0a-00105ea-git"}, {mapreduce,"1.0.0"}, {couch_index_merger,"1.2.0a-00105ea-git"}, {kernel,"2.14.5"}, {crypto,"2.0.4"}, {ssl,"4.1.6"}, {sasl,"2.1.10"}, {couch_view_parser,"1.0.0"}, {ns_server,"2.0.0-1949-rel-community"}, {mochiweb,"1.4.1"}, {oauth,"7d85d3ef"}, {stdlib,"1.17.5"}]}, {manifest, ["","", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""]}, {config, [{buckets, [{'_vclock',[{'ns_1@127.0.0.1',{8,63520051003}}]}, {configs,[]}]}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {memory_quota,2391}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,'filtered-out'}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, 
auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]},{2,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", 
[{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {rest,[{port,8091}]}, {rest_creds, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520050248}}]}, {creds,[{"Administrator",[{password,'filtered-out'}]}]}]}]}, {basic_info, [{version, [{public_key,"0.13"}, {lhttpc,"1.3.0"}, {ale,"8cffe61"}, {os_mon,"2.2.7"}, {couch_set_view,"1.2.0a-00105ea-git"}, {mnesia,"4.5"}, {inets,"5.7.1"}, {couch,"1.2.0a-00105ea-git"}, {mapreduce,"1.0.0"}, {couch_index_merger,"1.2.0a-00105ea-git"}, {kernel,"2.14.5"}, {crypto,"2.0.4"}, {ssl,"4.1.6"}, {sasl,"2.1.10"}, {couch_view_parser,"1.0.0"}, {ns_server,"2.0.0-1949-rel-community"}, {mochiweb,"1.4.1"}, {oauth,"7d85d3ef"}, {stdlib,"1.17.5"}]}, {supported_compat_version,[2,0]}, {system_arch,"i386-apple-darwin11.4.0"}, {wall_clock,2581}, {memory_data,{4168124000,4114120000,{<0.6.0>,1801744}}}, {disk_data,[{"/",487546976,7}]}]}, {memory,{4168124000,4114120000,{<0.6.0>,1801744}}}, {disk,[{"/",487546976,7}]}, {active_tasks, [[{pid,<<"<0.3717.0>">>}, {limit,4}, {running,0}, {started_on,1352829699}, {type,couch_main_index_barrier}, {updated_on,1352829699}, {waiting,0}], [{pid,<<"<0.3718.0>">>}, {limit,2}, {running,0}, {started_on,1352829699}, {type,couch_replica_index_barrier}, {updated_on,1352829699}, {waiting,0}]]}, {master_events, [{{1352,829703,916923}, vbucket_state_change,"default",'ns_1@127.0.0.1',63,active}, {{1352,829703,917474}, vbucket_state_change,"default",'ns_1@127.0.0.1',62,active}, {{1352,829703,917924}, vbucket_state_change,"default",'ns_1@127.0.0.1',61,active}, {{1352,829703,918267}, vbucket_state_change,"default",'ns_1@127.0.0.1',60,active}, {{1352,829703,918640}, vbucket_state_change,"default",'ns_1@127.0.0.1',59,active}, {{1352,829703,919309}, vbucket_state_change,"default",'ns_1@127.0.0.1',58,active}, {{1352,829703,920082}, vbucket_state_change,"default",'ns_1@127.0.0.1',57,active}, {{1352,829703,920647}, vbucket_state_change,"default",'ns_1@127.0.0.1',56,active}, {{1352,829703,921000}, vbucket_state_change,"default",'ns_1@127.0.0.1',55,active}, {{1352,829703,921311}, 
vbucket_state_change,"default",'ns_1@127.0.0.1',54,active}, {{1352,829703,921889}, vbucket_state_change,"default",'ns_1@127.0.0.1',53,active}, {{1352,829703,922522}, vbucket_state_change,"default",'ns_1@127.0.0.1',52,active}, {{1352,829703,923058}, vbucket_state_change,"default",'ns_1@127.0.0.1',51,active}, {{1352,829703,923450}, vbucket_state_change,"default",'ns_1@127.0.0.1',50,active}, {{1352,829703,923881}, vbucket_state_change,"default",'ns_1@127.0.0.1',49,active}, {{1352,829703,924239}, vbucket_state_change,"default",'ns_1@127.0.0.1',48,active}, {{1352,829703,924620}, vbucket_state_change,"default",'ns_1@127.0.0.1',47,active}, {{1352,829703,925107}, vbucket_state_change,"default",'ns_1@127.0.0.1',46,active}, {{1352,829703,925542}, vbucket_state_change,"default",'ns_1@127.0.0.1',45,active}, {{1352,829703,925991}, vbucket_state_change,"default",'ns_1@127.0.0.1',44,active}, {{1352,829703,926371}, vbucket_state_change,"default",'ns_1@127.0.0.1',43,active}, {{1352,829703,926726}, vbucket_state_change,"default",'ns_1@127.0.0.1',42,active}, {{1352,829703,927002}, vbucket_state_change,"default",'ns_1@127.0.0.1',41,active}, {{1352,829703,927389}, vbucket_state_change,"default",'ns_1@127.0.0.1',40,active}, {{1352,829703,928017}, vbucket_state_change,"default",'ns_1@127.0.0.1',39,active}, {{1352,829703,929365}, vbucket_state_change,"default",'ns_1@127.0.0.1',38,active}, {{1352,829703,930257}, vbucket_state_change,"default",'ns_1@127.0.0.1',37,active}, {{1352,829703,931079}, vbucket_state_change,"default",'ns_1@127.0.0.1',36,active}, {{1352,829703,932278}, vbucket_state_change,"default",'ns_1@127.0.0.1',35,active}, {{1352,829703,933045}, vbucket_state_change,"default",'ns_1@127.0.0.1',34,active}, {{1352,829703,934094}, vbucket_state_change,"default",'ns_1@127.0.0.1',33,active}, {{1352,829703,934655}, vbucket_state_change,"default",'ns_1@127.0.0.1',32,active}, {{1352,829703,935713}, vbucket_state_change,"default",'ns_1@127.0.0.1',31,active}, {{1352,829703,936462}, vbucket_state_change,"default",'ns_1@127.0.0.1',30,active}, {{1352,829703,937422}, vbucket_state_change,"default",'ns_1@127.0.0.1',29,active}, {{1352,829703,937815}, vbucket_state_change,"default",'ns_1@127.0.0.1',28,active}, {{1352,829703,938284}, vbucket_state_change,"default",'ns_1@127.0.0.1',27,active}, {{1352,829703,939043}, vbucket_state_change,"default",'ns_1@127.0.0.1',26,active}, {{1352,829703,939771}, vbucket_state_change,"default",'ns_1@127.0.0.1',25,active}, {{1352,829703,941177}, vbucket_state_change,"default",'ns_1@127.0.0.1',24,active}, {{1352,829703,941853}, vbucket_state_change,"default",'ns_1@127.0.0.1',23,active}, {{1352,829703,942388}, vbucket_state_change,"default",'ns_1@127.0.0.1',22,active}, {{1352,829703,942959}, vbucket_state_change,"default",'ns_1@127.0.0.1',21,active}, {{1352,829703,943431}, vbucket_state_change,"default",'ns_1@127.0.0.1',20,active}, {{1352,829703,944670}, vbucket_state_change,"default",'ns_1@127.0.0.1',19,active}, {{1352,829703,946200}, vbucket_state_change,"default",'ns_1@127.0.0.1',18,active}, {{1352,829703,946891}, vbucket_state_change,"default",'ns_1@127.0.0.1',17,active}, {{1352,829703,947664}, vbucket_state_change,"default",'ns_1@127.0.0.1',16,active}, {{1352,829703,948554}, vbucket_state_change,"default",'ns_1@127.0.0.1',15,active}, {{1352,829703,949505}, vbucket_state_change,"default",'ns_1@127.0.0.1',14,active}, {{1352,829703,950162}, vbucket_state_change,"default",'ns_1@127.0.0.1',13,active}, {{1352,829703,952156}, vbucket_state_change,"default",'ns_1@127.0.0.1',12,active}, 
{{1352,829703,953011}, vbucket_state_change,"default",'ns_1@127.0.0.1',11,active}, {{1352,829703,954266}, vbucket_state_change,"default",'ns_1@127.0.0.1',10,active}, {{1352,829703,954607}, vbucket_state_change,"default",'ns_1@127.0.0.1',9,active}, {{1352,829703,955841}, vbucket_state_change,"default",'ns_1@127.0.0.1',8,active}, {{1352,829703,957402}, vbucket_state_change,"default",'ns_1@127.0.0.1',7,active}, {{1352,829703,958203}, vbucket_state_change,"default",'ns_1@127.0.0.1',6,active}, {{1352,829703,958927}, vbucket_state_change,"default",'ns_1@127.0.0.1',5,active}, {{1352,829703,960138}, vbucket_state_change,"default",'ns_1@127.0.0.1',4,active}, {{1352,829703,960453}, vbucket_state_change,"default",'ns_1@127.0.0.1',3,active}, {{1352,829703,961092}, vbucket_state_change,"default",'ns_1@127.0.0.1',2,active}, {{1352,829703,962899}, vbucket_state_change,"default",'ns_1@127.0.0.1',1,active}, {{1352,829703,963946}, vbucket_state_change,"default",'ns_1@127.0.0.1',0,active}, {{1352,831254,331887},delete_bucket,"default"}, {{1352,831717,205358}, create_bucket,"beer-sample",membase, [{num_replicas,1}, {replica_index,true}, {auth_type,sasl}, {sasl_password,[]}, {ram_quota,104857600}]}, {{1352,831718,214217}, set_map,"beer-sample", [{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]}, {4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]}, {8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]}, {12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]}, {16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]}, {20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]}, {24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]}, {28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]}, {32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]}, {36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]}, {40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]}, {44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]}, {48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]}, {52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]}, {56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]}, {60,[],['ns_1@127.0.0.1',undefined]}, 
{61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}]}, {{1352,831718,216084}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',63,active}, {{1352,831718,238021}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',62,active}, {{1352,831718,239452}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',61,active}, {{1352,831718,240424}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',60,active}, {{1352,831718,241054}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',59,active}, {{1352,831718,241744}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',58,active}, {{1352,831718,242367}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',57,active}, {{1352,831718,243118}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',56,active}, {{1352,831718,243826}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',55,active}, {{1352,831718,244463}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',54,active}, {{1352,831718,245199}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',53,active}, {{1352,831718,245858}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',52,active}, {{1352,831718,246449}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',51,active}, {{1352,831718,329189}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',50,active}, {{1352,831718,329975}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',49,active}, {{1352,831718,330922}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',48,active}, {{1352,831718,331512}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',47,active}, {{1352,831718,332008}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',46,active}, {{1352,831718,332459}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',45,active}, {{1352,831718,333009}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',44,active}, {{1352,831718,333462}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',43,active}, {{1352,831718,333933}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',42,active}, {{1352,831718,334371}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',41,active}, {{1352,831718,334734}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',40,active}, {{1352,831718,386633}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',39,active}, {{1352,831718,388754}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',38,active}, {{1352,831718,390139}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',37,active}, {{1352,831718,391496}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',36,active}, {{1352,831718,392778}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',35,active}, {{1352,831718,393857}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',34,active}, {{1352,831718,395126}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',33,active}, {{1352,831718,395920}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',32,active}, {{1352,831718,396603}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',31,active}, {{1352,831718,397244}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',30,active}, {{1352,831718,397891}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',29,active}, {{1352,831718,398532}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',28,active}, {{1352,831718,399188}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',27,active}, {{1352,831718,474865}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',26,active}, {{1352,831718,476262}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',25,active}, {{1352,831718,477107}, 
vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',24,active}, {{1352,831718,477963}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',23,active}, {{1352,831718,478823}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',22,active}, {{1352,831718,479697}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',21,active}, {{1352,831718,480398}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',20,active}, {{1352,831718,480974}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',19,active}, {{1352,831718,481542}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',18,active}, {{1352,831718,482115}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',17,active}, {{1352,831718,482709}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',16,active}, {{1352,831718,483290}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',15,active}, {{1352,831718,483913}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',14,active}, {{1352,831718,484461}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',13,active}, {{1352,831718,485013}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',12,active}, {{1352,831718,485548}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',11,active}, {{1352,831718,486080}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',10,active}, {{1352,831718,486653}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',9,active}, {{1352,831718,487144}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',8,active}, {{1352,831718,487575}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',7,active}, {{1352,831718,488030}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',6,active}, {{1352,831718,488462}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',5,active}, {{1352,831718,488874}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',4,active}, {{1352,831718,489285}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',3,active}, {{1352,831718,489694}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',2,active}, {{1352,831718,490108}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',1,active}, {{1352,831718,490525}, vbucket_state_change,"beer-sample",'ns_1@127.0.0.1',0,active}, {{1352,831803,362983},delete_bucket,"beer-sample"}]}, {ns_server_stats, [{{avg_5m,"unknown",long_calls_rate},0.0}, {{"unknown",long_calls},0}, {config_merger_queue_len,0}, {{"ns_memcached-beer-sample",q_call_time},1568185}, {{avg_1m,config_merger_run_time},0.0}, {{"unknown",e2e_calls},0}, {{avg_1m,config_merger_runs_rate},0.0}, {{"unknown",e2e_call_time},0}, {{avg_10s,config_merger_sleep_time},0.0}, {{avg_1m,"unknown",call_time},0.0}, {{avg_5m,config_merger_queue_len},0.0}, {{"unknown",calls},0}, {{avg_5m,config_merger_runs_rate},0.0}, {{avg_10s,"unknown",long_calls_rate},0.0}, {{"unknown",call_time},0}, {{avg_5m,"unknown",e2e_calls_rate},1.1269665985223537e-6}, {{"unknown",long_call_time},0}, {{avg_1m,config_merger_queue_len},0.0}, {{avg_10s,config_merger_queue_len},0.0}, {{avg_5m,"unknown",call_time},0.0}, {{avg_5m,config_merger_run_time},0.0}, {{"ns_memcached-default",q_call_time},7104921}, {{avg_10s,"unknown",e2e_call_time},5.103653133476983e-5}, {{avg_1m,"unknown",e2e_calls_rate},0.019840618490909498}, {{avg_1m,"unknown",calls_rate},0.0}, {{avg_10s,"unknown",e2e_calls_rate},9.251324299072737e-8}, {{avg_5m,"unknown",long_call_time},0.0}, {total_config_merger_run_time,0}, {total_config_merger_sleep_time,0}, {{avg_5m,"unknown",e2e_call_time},6.176990374606899e-4}, {{avg_1m,"unknown",e2e_call_time},11.162036472611426}, {{avg_1m,"unknown",long_call_time},0.0}, 
{ns_config_rep_push_keys_total_retries_left,20}, {{avg_10s,"unknown",long_call_time},0.0}, {ns_config_rep_push_keys_retries,2}, {tracked_ns_memcacheds,[]}, {total_config_merger_runs,0}, {{avg_10s,config_merger_run_time},0.0}, {{avg_10s,"unknown",calls_rate},0.0}, {{avg_5m,"unknown",calls_rate},0.0}, {{avg_5m,config_merger_sleep_time},0.0}, {{avg_1m,"unknown",long_calls_rate},0.0}, {{avg_1m,config_merger_sleep_time},0.0}, {{avg_10s,config_merger_runs_rate},0.0}, {{avg_10s,"unknown",call_time},0.0}]}, {active_buckets,[]}, {tap_stats,[]}]}
per_node_processes = {'ns_1@127.0.0.1', [{<0.28830.0>, [{registered_name,[]}, {status,running}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00000001099065b8 (unknown function)">>, <<"CP: 0x000000010b9fe2a0 (diag_handler:grab_process_info/1 + 56)">>, <<>>, <<"0x000000010fa567e8 Return addr 0x000000010b9fece0 (diag_handler:grab_process_infos_loop/2 +">>, <<"y(0) []">>,<<>>, <<"0x000000010fa567f8 Return addr 0x000000010b9fe8a0 (diag_handler:do_diag_per_node/0 + 264)">>, <<"(0) [{<0.28829.0>,[{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2">>, <<"y(1) []">>,<<"y(2) <0.28830.0>">>, <<"y(3) Catch 0x000000010b9fece0 (diag_handler:grab_process_infos_loop/2 + 120)">>, <<>>, <<"0x000000010fa56820 Return addr 0x000000010a7f4a60 (rpc:'-handle_call_call/6-fun-0-'/5 + 192">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) []">>,<<"y(5) []">>, <<"y(6) []">>, <<"(7) [{version,[{public_key,\"0.13\"},{lhttpc,\"1.3.0\"},{ale,\"8cffe61\"},{os_mon,\"2.2.7\"},{">>, <<"(8) [{buckets,[{'_vclock',[{'ns_1@127.0.0.1',{8,63520051003}}]},{configs,[]}]},{vbucke">>, <<"(9) [\"\",\"\",\" ">>, <<"(10) [{public_key,\"0.13\"},{lhttpc,\"1.3.0\"},{ale,\"8cffe61\"},{os_mon,\"2.2.7\"},{couch_set_">>, <<"y(11) []">>,<<>>, <<"0x000000010fa56888 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x000000010a7f4a60 (rpc:'-handle_call_call/6-fun-0-'/5 + 192)">>, <<"y(1) []">>,<<"y(2) []">>,<<"y(3) []">>, <<"y(4) <0.12.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,75025}, {total_heap_size,103682}, {links,[]}, {memory,830224}, {message_queue_len,0}, {reductions,89036}, {trap_exit,false}]}, {<0.28829.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010a7d4e18 (gen_server:rec_nodes/7 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f86a618 Return addr 0x000000010a7da7b8 (gen_server:'-do_multi_call/4-fun-0-'/6 +">>, <<"y(0) #Ref<0.0.1.229822>">>,<<"y(1) 2000">>, <<"y(2) []">>,<<"y(3) []">>,<<"y(4) rex">>, <<"y(5) #Ref<0.0.1.229818>">>,<<"y(6) []">>, <<"y(7) #Ref<0.0.1.229821>">>, <<"y(8) 'ns_1@127.0.0.1'">>,<<>>, <<"0x000000010f86a668 Return addr 0x000000010963bc08 ()">>, <<"y(0) #Ref<0.0.1.229818>">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>,<<"y(4) []">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[]}, {memory,2776}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<0.28828.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fd561a0 Return addr 0x000000010b9cd2a0
(inet_tcp:accept/2 + 40)">>, <<"y(0) 59415">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fd561b8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fd561c8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,665516}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fd561f8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,610}, {total_heap_size,610}, {links,[<0.3943.0>]}, {memory,5792}, {message_queue_len,0}, {reductions,20}, {trap_exit,false}]}, {<0.28825.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099dd998 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59394">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x00000001099dd9b0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x00000001099dd9c0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,664059}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x00000001099dd9f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,610}, {total_heap_size,610}, {links,[<0.3943.0>]}, {memory,5792}, {message_queue_len,0}, {reductions,20}, {trap_exit,false}]}, {<0.28758.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcec670 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59399">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fcec688 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fcec698 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,12074}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fcec6c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,193}, {trap_exit,false}]}, {<0.28686.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe2aa18 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59398">>,<<"y(1) 
#Port<0.10708>">>,<<>>, <<"0x000000010fe2aa30 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe2aa40 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,996637}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fe2aa70 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,323}, {trap_exit,false}]}, {<0.28611.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcf0420 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59397">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fcf0438 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fcf0448 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,988493}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fcf0478 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,454}, {trap_exit,false}]}, {<0.28542.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f59fbc8 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59396">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f59fbe0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f59fbf0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,972321}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f59fc20 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,584}, {trap_exit,false}]}, {<0.28372.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010963bc00 (unknown function)">>, <<"CP: 0x000000010963bc08 ()">>, <<"arity = 3">>,<<" menelaus_web">>, <<" handle_pool_info_wait_wake">>, <<" [{mochiweb_request,#Port<0.31497>,'GET',\"/pools/default?uuid=b34a9c2e03786d913446a4e8491">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,1798}, {total_heap_size,1798}, {links,[<0.3943.0>,#Port<0.31497>]}, {memory,15552}, {message_queue_len,0}, {reductions,38042}, {trap_exit,false}]}, {<0.28365.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe04818 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59393">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fe04830 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe04840 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437145}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fe04870 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28364.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcf78d0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59389">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fcf78e8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fcf78f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437123}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fcf7928 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28363.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcf5118 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59388">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fcf5130 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fcf5140 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437117}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fcf5170 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28361.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f56c7e0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59386">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f56c7f8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f56c808 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437107}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f56c838 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28360.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f6fdad0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59387">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f6fdae8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f6fdaf8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437112}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f6fdb28 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28359.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe52958 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59385">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fe52970 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe52980 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437101}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fe529b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, 
{min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28358.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5c8930 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59384">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f5c8948 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f5c8958 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437095}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f5c8988 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28356.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d4870 (gen_server:do_multi_call/4 + 736)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f609e08 Return addr 0x000000010a7f3140 (rpc:do_multicall/5 + 256)">>, <<"y(0) []">>,<<"y(1) #Ref<0.0.1.229819>">>, <<"y(2) <0.28829.0>">>,<<"y(3) #Ref<0.0.1.229818>">>, <<>>, <<"0x000000010f609e30 Return addr 0x000000010b9fee58 (diag_handler:diag_multicall/3 + 208)">>, <<"y(0) []">>,<<"y(1) []">>,<<>>, <<"0x000000010f609e48 Return addr 0x000000010b9ff9e8 (diag_handler:handle_just_diag/2 + 368)">>, <<"y(0) ['ns_1@127.0.0.1']">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>,<<>>, <<"0x000000010f609e70 Return addr 0x000000010b9ffe20 (diag_handler:do_handle_diag/2 + 40)">>, <<"y(0) []">>, <<"(1) {mochiweb_response,{mochiweb_request,#Port<0.31498>,'GET',\"/diag\",{1,1},{9,{\"host\"">>, <<>>, <<"0x000000010f609e88 Return addr 0x000000010bbf7f80 (menelaus_web:loop/3 + 38960)">>, <<"y(0) []">>,<<>>, <<"0x000000010f609e98 Return addr 0x000000010b9d7f90 (mochiweb_http:headers/5 + 1176)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>, <<"(4) {mochiweb_request,#Port<0.31498>,'GET',\"/diag\",{1,1},{9,{\"host\",{'Host',\"127.0.0.1">>, <<"y(5) Catch 0x000000010bbf84c8 (menelaus_web:loop/3 + 40312)">>, <<>>, <<"0x000000010f609ed0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) []">>,<<"y(2) []">>, <<"(3) {mochiweb_request,#Port<0.31498>,'GET',\"/diag\",{1,1},{9,{\"host\",{'Host',\"127.0.0.1">>, <<>>, <<"0x000000010f609ef8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,4181}, {total_heap_size,6765}, {links,[<0.3943.0>,#Port<0.31498>]}, {memory,55144}, {message_queue_len,0}, {reductions,4937}, {trap_exit,false}]}, {<0.28354.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 
0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcf8bf0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59390">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010fcf8c08 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fcf8c18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437129}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010fcf8c48 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28352.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f927588 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59392">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f9275a0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f9275b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437140}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f9275e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.28351.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5ac878 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59391">>,<<"y(1) #Port<0.10708>">>,<<>>, <<"0x000000010f5ac890 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f5ac8a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,437134}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10708>">>,<<"y(4) <0.3943.0>">>,<<>>, <<"0x000000010f5ac8d0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3943.0>]}, {memory,13688}, {message_queue_len,0}, {reductions,844}, {trap_exit,false}]}, {<0.14272.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 
0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd8dee0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_port_server">>, <<"(3) {state,#Port<0.16803>,moxi,{[\"2012-11-13 10:36:43: (cproxy_config.c.326) env: MOXI">>, <<"y(4) <0.14272.0>">>,<<"y(5) <0.14271.0>">>,<<>>, <<"0x000000010dd8df18 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.14271.0>,#Port<0.16803>]}, {memory,13656}, {message_queue_len,0}, {reductions,887}, {trap_exit,true}]}, {<0.14271.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dc32c20 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_cushion">>, <<"y(3) {state,moxi,5000,{1352,831803,907349},<0.14272.0>,10000}">>, <<"y(4) <0.14271.0>">>,<<"y(5) <0.3967.0>">>,<<>>, <<"0x000000010dc32c58 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,1597}, {total_heap_size,4181}, {links,[<0.3967.0>,<0.14272.0>]}, {memory,34328}, {message_queue_len,0}, {reductions,767}, {trap_exit,true}]}, {<0.14268.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010963bc00 (unknown function)">>, <<"CP: 0x000000010963bc08 ()">>, <<"arity = 3">>,<<" menelaus_web">>, <<" handle_streaming_wakeup">>, <<" [#Fun,{mochiweb_request,#Port<0.16817>,'GET',\"/pools/de">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,657}, {total_heap_size,657}, {links,[<0.3943.0>,#Port<0.16817>]}, {memory,6424}, {message_queue_len,0}, {reductions,5938}, {trap_exit,false}]}, {<0.13773.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe10850 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59395">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fe10868 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe10878 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831953,757975}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fe108a8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,1287}, {trap_exit,false}]}, 
{<0.12928.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bed7e8 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b7cf9b0 Return addr 0x000000010b9ad1b0 (mc_connection:read_full_message/2 + 88)">>, <<"y(0) 43036">>,<<"y(1) #Port<0.14211>">>,<<>>, <<"0x000000010b7cf9c8 Return addr 0x000000010b9ada20 (mc_connection:run_loop/2 + 72)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>, <<"y(4) #Fun">>, <<"y(5) #Port<0.14211>">>,<<>>, <<"0x000000010b7cfa00 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.14211>">>,<<>>, <<"0x000000010b7cfa18 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,579}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.3965.0>,#Port<0.14211>]}, {memory,34328}, {message_queue_len,0}, {reductions,185989}, {trap_exit,false}]}, {<0.12927.0>, [{registered_name,menelaus_web_alerts_srv}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a9095d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) menelaus_web_alerts_srv">>, <<"(3) {state,[],[],{dict,2,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[">>, <<"y(4) menelaus_web_alerts_srv">>, <<"y(5) <0.3942.0>">>,<<>>, <<"0x000000010a909608 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,10}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.3942.0>,<0.66.0>]}, {memory,29448}, {message_queue_len,0}, {reductions,2736}, {trap_exit,false}]}, {<0.4019.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bed7e8 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcc21a8 Return addr 0x000000010b9ad1b0 (mc_connection:read_full_message/2 + 88)">>, <<"y(0) 6521">>,<<"y(1) #Port<0.10722>">>,<<>>, <<"0x000000010fcc21c0 Return addr 0x000000010b9ada20 (mc_connection:run_loop/2 + 72)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>, <<"y(4) #Fun">>, <<"y(5) #Port<0.10722>">>,<<>>, <<"0x000000010fcc21f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.10722>">>,<<>>, <<"0x000000010fcc2210 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,160}]}, {heap_size,987}, {total_heap_size,2584}, {links,[<0.3965.0>,#Port<0.10722>]}, {memory,21552}, {message_queue_len,0}, {reductions,94294}, {trap_exit,false}]}, {<0.4011.0>, [{registered_name,samples_loader_tasks}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 
(invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5c1998 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) samples_loader_tasks">>, <<"y(3) {state,[],undefined}">>, <<"y(4) samples_loader_tasks">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010f5c19d0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,987}, {total_heap_size,1974}, {links,[<0.3887.0>]}, {memory,16632}, {message_queue_len,0}, {reductions,5720}, {trap_exit,true}]}, {<0.4008.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a77e3a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52098>}">>, <<"y(2) <0.4007.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010a77e3c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.4007.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.4007.0>, [{registered_name,set_view_update_daemon}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010db408e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) set_view_update_daemon">>, <<"y(3) {state,5000,5000,5000,{1352831957984273,#Ref<0.0.1.229693>}}">>, <<"y(4) set_view_update_daemon">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010db40920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,138}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.3887.0>,<0.4008.0>]}, {memory,601080}, {message_queue_len,0}, {reductions,203986}, {trap_exit,false}]}, {<0.4006.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b6feff0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52092>}">>, <<"y(2) <0.4004.0>">>,<<"y(3) ns_node_disco_events">>, <<>>, <<"0x000000010b6ff018 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.4004.0>,<0.3893.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.4004.0>, [{registered_name,xdc_rdoc_replication_srv}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e985cf0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) xdc_rdoc_replication_srv">>, <<"y(3) {state,[],[{doc,<<24 bytes>>,{0,<<0 bytes>>},[[<<2858 bytes>>]],0,false,[]}]}">>, <<"y(4) xdc_rdoc_replication_srv">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010e985d28 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.3887.0>,<0.4006.0>]}, {memory,29448}, {message_queue_len,0}, {reductions,2486}, {trap_exit,false}]}, {<0.4003.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099b06c8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52066>}">>, <<"y(2) <0.4002.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x00000001099b06f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.4002.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.4002.0>, [{registered_name,compaction_daemon}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010bb95c48 (gen_fsm:loop/7 + 272)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e3b08e0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) compaction_daemon">>, <<"(3) {state,[],undefined,undefined,undefined,{1352831960241287,#Ref<0.0.1.228544>},{dic">>, <<"y(4) idle">>,<<"y(5) compaction_daemon">>, <<"y(6) <0.4000.0>">>,<<>>, <<"0x000000010e3b0920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,87}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.4003.0>,<0.4000.0>]}, {memory,601080}, {message_queue_len,0}, {reductions,243274}, {trap_exit,true}]}, {<0.4000.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa14bc8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_cushion">>, <<"y(3) {state,compaction_daemon,3000,{1352,829701,932539},<0.4002.0>,1000}">>, <<"y(4) <0.4000.0>">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010aa14c00 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, 
[{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3887.0>,<0.4002.0>]}, {memory,8776}, {message_queue_len,0}, {reductions,689}, {trap_exit,true}]}, {<0.3999.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd7d020 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52032>}">>, <<"y(2) <0.3998.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010dd7d048 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3998.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3998.0>, [{registered_name,ns_moxi_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa01460 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,ns_moxi_sup},one_for_one,[],undefined,20,10,[],ns_moxi_sup,[]}">>, <<"y(4) ns_moxi_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010aa01498 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<0.3887.0>,<0.3999.0>]}, {memory,142568}, {message_queue_len,0}, {reductions,2403}, {trap_exit,true}]}, {<0.3997.0>, [{registered_name,ns_moxi_sup_work_queue}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e0f58e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) work_queue">>,<<"y(3) []">>, <<"y(4) ns_moxi_sup_work_queue">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010e0f5920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.3887.0>]}, {memory,601040}, {message_queue_len,0}, {reductions,7342}, {trap_exit,false}]}, {<0.3996.0>, [{registered_name,'stats_reader-@system'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe14d88 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) stats_reader">>, <<"y(3) {state,\"@system\"}">>, <<"y(4) 'stats_reader-@system'">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010fe14dc0 Return addr 0x000000010963bc08 
()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,455}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3887.0>]}, {memory,8736}, {message_queue_len,0}, {reductions,93118}, {trap_exit,false}]}, {<0.3995.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd8ed30 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52027>}">>, <<"y(2) <0.3994.0>">>,<<"y(3) ns_stats_event">>,<<>>, <<"0x000000010dd8ed58 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3994.0>,<0.3912.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3994.0>, [{registered_name,'stats_archiver-@system'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fd03c70 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) stats_archiver">>, <<"y(3) {state,\"@system\"}">>, <<"y(4) 'stats_archiver-@system'">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010fd03ca8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,58}]}, {heap_size,2584}, {total_heap_size,3194}, {links,[<0.3887.0>,<0.3995.0>,<0.66.0>]}, {memory,26472}, {message_queue_len,0}, {reductions,11864756}, {trap_exit,false}]}, {<0.3993.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd9af88 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.52002>}">>, <<"y(2) <0.3985.0>">>,<<"y(3) ns_tick_event">>,<<>>, <<"0x000000010dd9afb0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3985.0>,<0.3907.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3985.0>, [{registered_name,system_stats_collector}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c3e89a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) system_stats_collector">>, <<"(3) 
{state,#Port<0.10715>,[{cpu_local_ms,927041660},{cpu_idle_ms,806597180},{swap_tota">>, <<"y(4) system_stats_collector">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010c3e89d8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,50}]}, {heap_size,10946}, {total_heap_size,28657}, {links,[<0.3887.0>,<0.3993.0>,#Port<0.10715>]}, {memory,230176}, {message_queue_len,0}, {reductions,3435603}, {trap_exit,false}]}, {<0.3984.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa23a18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51973>}">>, <<"y(2) <0.3983.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010aa23a40 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3983.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3983.0>, [{registered_name,ns_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ab96960 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_bucket_sup},one_for_one,[{child,<0.3984.0>,buckets_observing_subs">>, <<"y(4) ns_bucket_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010ab96998 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<0.3984.0>,<0.3887.0>]}, {memory,142568}, {message_queue_len,0}, {reductions,1482}, {trap_exit,true}]}, {<0.3982.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f00f60 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.51970>}}">>, <<"y(4) <0.3982.0>">>,<<"y(5) <0.3981.0>">>,<<>>, <<"0x0000000109f00f98 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,610}, {total_heap_size,610}, {links,[<0.3981.0>,<0.3711.0>]}, {memory,5760}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3981.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010e2a50d0 
(couch_changes:wait_db_updated/3 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd93048 Return addr 0x000000010e2a3e28 (couch_changes:keep_sending_changes/9 + 8">>, <<"y(0) ok">>, <<"y(1) #Fun">>, <<"y(2) infinity">>,<<>>, <<"0x000000010dd93068 Return addr 0x000000010e2a7098 (couch_changes:'-handle_changes/3-fun-1-'">>, <<"y(0) 999999999999999">>,<<"y(1) ok">>, <<"y(2) <<0 bytes>>">>,<<"y(3) 1">>, <<"y(4) #Fun">>, <<"y(5) infinity">>, <<"(6) {db,<0.3311.0>,<0.3312.0>,nil,<<16 bytes>>,<0.3308.0>,<0.3313.0>,{db_header,11,1,<">>, <<"y(7) #Fun">>, <<"(8) {changes_args,\"continuous\",fwd,0,1000000000000000,main_only,undefined,infinity,[],">>, <<"y(9) [sys_db]">>,<<"y(10) []">>, <<"y(11) \"continuous\"">>,<<>>, <<"0x000000010dd930d0 Return addr 0x000000010e26b5c0 (xdc_rep_manager:'-changes_feed_loop/0-fu">>, <<"y(0) Catch 0x000000010e2a7100 (couch_changes:'-handle_changes/3-fun-1-'/5 + 800)">>, <<"y(1) []">>,<<"y(2) []">>,<<"y(3) []">>, <<"y(4) []">>,<<"y(5) []">>,<<"y(6) <0.3982.0>">>, <<>>, <<"0x000000010dd93110 Return addr 0x000000010963bc08 ()">>, <<"(0) {db,<0.3311.0>,<0.3312.0>,nil,<<16 bytes>>,<0.3308.0>,<0.3313.0>,{db_header,11,1,<">>, <<"y(1) []">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3980.0>,<0.3982.0>]}, {memory,13552}, {message_queue_len,0}, {reductions,632}, {trap_exit,false}]}, {<0.3980.0>, [{registered_name,xdc_rep_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe2fdc8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) xdc_rep_manager">>, <<"y(3) {rep_db_state,<0.3981.0>,<<11 bytes>>}">>, <<"y(4) xdc_rep_manager">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010fe2fe00 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,46}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.3887.0>,<0.3981.0>]}, {memory,29448}, {message_queue_len,0}, {reductions,30964}, {trap_exit,false}]}, {<0.3979.0>, [{registered_name,xdc_replication_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcfccb8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,xdc_replication_sup},one_for_one,[],undefined,3,10,[],xdc_replicatio">>, <<"y(4) xdc_replication_sup">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010fcfccf0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,54}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.3887.0>]}, {memory,11752}, {message_queue_len,0}, {reductions,14392}, {trap_exit,true}]}, {<0.3977.0>, [{registered_name,ns_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace,
[<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5a32b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) work_queue">>,<<"y(3) []">>, <<"y(4) ns_bucket_worker">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010f5a32e8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[<0.3887.0>]}, {memory,26392}, {message_queue_len,0}, {reductions,4152}, {trap_exit,false}]}, {<0.3975.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c67d920 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51877>}">>, <<"y(2) <0.3973.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010c67d948 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3973.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3974.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e1888e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) 39003">>, <<"y(2) ns_memcached_log_rotator">>, <<"(3) {state,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/log">>, <<"y(4) <0.3974.0>">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010e188920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,22}]}, {heap_size,46368}, {total_heap_size,46745}, {links,[<0.3887.0>]}, {memory,374800}, {message_queue_len,0}, {reductions,109704}, {trap_exit,false}]}, {<0.3973.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d8002b0 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<0.3887.0>,<0.3975.0>]}, {memory,142464}, {message_queue_len,0}, {reductions,1195}, {trap_exit,false}]}, {<0.3972.0>, [{registered_name,ns_port_memcached}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ebe1b40 Return addr 0x0000000109c348b0 
(proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_port_server">>, <<"(3) {state,#Port<0.10711>,memcached,{[\"Tue Nov 13 10:36:43.366118 PST 3: Shutting down">>, <<"y(4) <0.3972.0>">>,<<"y(5) <0.3971.0>">>,<<>>, <<"0x000000010ebe1b78 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[<0.3971.0>,#Port<0.10711>]}, {memory,67776}, {message_queue_len,0}, {reductions,3165}, {trap_exit,true}]}, {<0.3971.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c675c20 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_cushion">>, <<"y(3) {state,memcached,5000,{1352,829701,907223},<0.3972.0>,60000}">>, <<"y(4) <0.3971.0>">>,<<"y(5) <0.3967.0>">>,<<>>, <<"0x000000010c675c58 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,6765}, {links,[<0.3967.0>,<0.3972.0>]}, {memory,55000}, {message_queue_len,0}, {reductions,869}, {trap_exit,true}]}, {<0.3968.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c676b10 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3967.0>,<0.3874.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.3967.0>, [{registered_name,ns_port_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d145870 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_port_sup},one_for_one,[{child,<0.14271.0>,{moxi,\"/Users/farshid/D">>, <<"y(4) ns_port_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010d1458a8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,75025}, {total_heap_size,103682}, {links,[<0.3968.0>,<0.3971.0>,<0.14271.0>,<0.3887.0>]}, {memory,830416}, {message_queue_len,0}, {reductions,9752}, {trap_exit,true}]}, {<0.3966.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fdb7708 Return addr 0x000000010b9cd180 (inet_tcp:accept/1 + 
40)">>, <<"y(0) 38351">>,<<"y(1) #Port<0.10709>">>,<<>>, <<"0x000000010fdb7720 Return addr 0x000000010e257cb0 (mc_tcp_listener:accept_loop/1 + 56)">>, <<"y(0) []">>,<<>>, <<"0x000000010fdb7730 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) #Port<0.10709>">>,<<>>, <<"0x000000010fdb7748 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,987}, {total_heap_size,1974}, {links,[#Port<0.10709>,<0.3963.0>]}, {memory,16744}, {message_queue_len,0}, {reductions,3531}, {trap_exit,false}]}, {<0.3965.0>, [{registered_name,mc_conn_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000011023dfd0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mc_conn_sup},simple_one_for_one,[{child,undefined,mc_connection,{mc_">>, <<"y(4) mc_conn_sup">>,<<"y(5) <0.3963.0>">>,<<>>, <<"0x000000011023e008 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.4019.0>,<0.12928.0>,<0.3963.0>]}, {memory,3936}, {message_queue_len,0}, {reductions,119}, {trap_exit,true}]}, {<0.3964.0>, [{registered_name,mc_couch_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5c7638 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) mc_couch_events">>,<<"y(4) <0.3963.0>">>,<<>>, <<"0x000000010f5c7668 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,74}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<0.3963.0>]}, {memory,42184}, {message_queue_len,0}, {reductions,143317}, {trap_exit,true}]}, {<0.3963.0>, [{registered_name,mc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c6d5888 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mc_sup},rest_for_one,[{child,<0.3966.0>,mc_tcp_listener,{mc_tcp_list">>, <<"y(4) mc_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010c6d58c0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.3964.0>,<0.3965.0>,<0.3966.0>,<0.3887.0>]}, {memory,8856}, {message_queue_len,0}, 
{reductions,184}, {trap_exit,true}]}, {<0.3961.0>, [{registered_name,hot_keys_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001123004e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) hot_keys_keeper">>, <<"y(3) {state,[],[],<0.28818.0>}">>, <<"y(4) hot_keys_keeper">>,<<"y(5) <0.3942.0>">>,<<>>, <<"0x0000000112300520 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,16}]}, {heap_size,10946}, {total_heap_size,11556}, {links,[<0.3942.0>,<0.66.0>]}, {memory,93328}, {message_queue_len,0}, {reductions,8330}, {trap_exit,false}]}, {<0.3960.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b716538 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3893.0>,<0.3909.0>,<0.3942.0>,<0.3908.0>,<0.3874.0>]}, {memory,2760}, {message_queue_len,0}, {reductions,35}, {trap_exit,false}]}, {<0.3943.0>, [{registered_name,menelaus_web}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a92a090 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mochiweb_socket_server">>, <<"(3) {mochiweb_socket_server,8091,#Fun,{local,menelaus_web},2">>, <<"y(4) menelaus_web">>,<<"y(5) <0.3942.0>">>,<<>>, <<"0x000000010a92a0c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,114}]}, {heap_size,1597}, {total_heap_size,4181}, {links, [<0.28360.0>,<0.28611.0>,<0.28758.0>,<0.28825.0>,<0.28828.0>, <0.28686.0>,<0.28364.0>,<0.28372.0>,<0.28542.0>,<0.28365.0>, <0.28361.0>,<0.28363.0>,<0.28352.0>,<0.28356.0>,<0.28358.0>, <0.28359.0>,<0.28354.0>,<0.14268.0>,<0.28351.0>,<0.3942.0>, #Port<0.10708>]}, {memory,35088}, {message_queue_len,0}, {reductions,895165}, {trap_exit,true}]}, {<0.3942.0>, [{registered_name,menelaus_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ab9ec18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,menelaus_sup},one_for_one,[{child,<0.12927.0>,menelaus_web_alerts_sr">>, <<"y(4) menelaus_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010ab9ec50 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, 
[{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,4181}, {total_heap_size,32838}, {links,[<0.3943.0>,<0.3961.0>,<0.12927.0>,<0.3960.0>,<0.3887.0>]}, {memory,263704}, {message_queue_len,0}, {reductions,3070}, {trap_exit,true}]}, {<0.3941.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f0e110 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51719>}">>, <<"y(2) <0.3940.0>">>, <<"y(3) master_activity_events">>,<<>>, <<"0x0000000109f0e138 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3940.0>,<0.3936.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3940.0>, [{registered_name,master_activity_events_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010963bc00 (unknown function)">>, <<"CP: 0x000000010963bc08 ()">>, <<"arity = 3">>,<<" proc_lib">>,<<" wake_up">>, <<" [gen_server,wake_hib,[<0.3887.0>,master_activity_events_keeper,{state,{[{{1352,831803,36">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,135}]}, {heap_size,20914}, {total_heap_size,20914}, {links,[<0.3887.0>,<0.3941.0>]}, {memory,168192}, {message_queue_len,0}, {reductions,19022}, {trap_exit,false}]}, {<0.3939.0>, [{registered_name,master_activity_events_pids_watcher}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa33f28 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) master_activity_events_pids_watcher">>, <<"y(3) []">>, <<"y(4) master_activity_events_pids_watcher">>, <<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010aa33f60 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3887.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,27}, {trap_exit,false}]}, {<0.3938.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa337e0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51709>}">>, <<"y(2) <0.3887.0>">>, <<"y(3) master_activity_events_ingress">>,<<>>, <<"0x000000010aa33808 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, 
{heap_size,233}, {total_heap_size,233}, {links,[<0.3887.0>,<0.3937.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3937.0>, [{registered_name,master_activity_events_ingress}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b80f320 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.51709>,{state,#Fun>, <<"y(3) master_activity_events_ingress">>, <<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010b80f350 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,17}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.3887.0>,<0.3938.0>]}, {memory,24568}, {message_queue_len,0}, {reductions,3912}, {trap_exit,true}]}, {<0.3936.0>, [{registered_name,master_activity_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e5621c0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.51719>,{state,#Fun>, <<"y(3) master_activity_events">>, <<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010e5621f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,10}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.3887.0>,<0.3941.0>]}, {memory,24568}, {message_queue_len,0}, {reductions,2114}, {trap_exit,true}]}, {<0.3935.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010cfeaf08 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) auto_failover">>, <<"y(3) {state,undefined,nil,30,0}">>, <<"y(4) auto_failover">>,<<"y(5) <0.3928.0>">>,<<>>, <<"0x000000010cfeaf40 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<0.3928.0>]}, {memory,142600}, {message_queue_len,0}, {reductions,1697}, {trap_exit,false}]}, {<0.3934.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcbfff8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_tick">>,<<"y(3) {state,1352831954892}">>, <<"y(4) ns_tick">>,<<"y(5) <0.3928.0>">>,<<>>, <<"0x000000010fcc0030 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,281}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.3928.0>,<0.66.0>]}, {memory,6984}, {message_queue_len,0}, {reductions,45377}, {trap_exit,false}]}, {<0.3929.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010bb95c48 (gen_fsm:loop/7 + 272)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d6208e0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_orchestrator">>,<<"y(3) {idle_state,[]}">>, <<"y(4) idle">>,<<"y(5) ns_orchestrator">>, <<"y(6) <0.3928.0>">>,<<>>, <<"0x000000010d620920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,141}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.3928.0>,<0.66.0>]}, {memory,601152}, {message_queue_len,0}, {reductions,186786}, {trap_exit,true}]}, {<0.3928.0>, [{registered_name,mb_master_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aa33078 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mb_master_sup},one_for_one,[{child,<0.3935.0>,auto_failover,{auto_fa">>, <<"y(4) mb_master_sup">>,<<"y(5) <0.3926.0>">>,<<>>, <<"0x000000010aa330b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,987}, {total_heap_size,1974}, {links,[<0.3929.0>,<0.3934.0>,<0.3935.0>,<0.3926.0>]}, {memory,16752}, {message_queue_len,0}, {reductions,2996}, {trap_exit,true}]}, {<0.3927.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f38ba28 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51553>}">>, <<"y(2) <0.3926.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010f38ba50 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3926.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3926.0>, [{registered_name,mb_master}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010bb95c48 (gen_fsm:loop/7 + 272)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a93a3b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mb_master">>, <<"y(3) 
{state,<0.3928.0>,'ns_1@127.0.0.1',['ns_1@127.0.0.1'],{1352,829701,890105}}">>, <<"y(4) master">>,<<"y(5) mb_master">>, <<"y(6) <0.3887.0>">>,<<>>, <<"0x000000010a93a3f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,112}]}, {heap_size,377}, {total_heap_size,1364}, {links,[<0.3887.0>,<0.3927.0>,<0.3928.0>,<0.66.0>]}, {memory,11872}, {message_queue_len,0}, {reductions,32708}, {trap_exit,true}]}, {<0.3919.0>, [{registered_name,remote_clusters_info}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d3218e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) remote_clusters_info">>, <<"(3) {state,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/rem">>, <<"y(4) remote_clusters_info">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010d321920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,18}]}, {heap_size,46368}, {total_heap_size,47355}, {links,[<0.3887.0>]}, {memory,379680}, {message_queue_len,0}, {reductions,36017}, {trap_exit,false}]}, {<0.3917.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f27c18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51397>}">>, <<"y(2) <0.3916.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x0000000109f27c40 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3916.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3916.0>, [{registered_name,ns_doctor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e8468e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_doctor">>, <<"(3) {state,{dict,1,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[">>, <<"y(4) ns_doctor">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010e846920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,177}]}, {heap_size,46368}, {total_heap_size,92736}, {links,[<0.3887.0>,<0.3917.0>,<0.66.0>]}, {memory,742808}, {message_queue_len,0}, {reductions,548379}, {trap_exit,false}]}, {<0.3915.0>, [{registered_name,[]}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a758f88 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51392>}">>, <<"y(2) <0.3913.0>">>,<<"y(3) buckets_events">>,<<>>, <<"0x000000010a758fb0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3913.0>,<0.3909.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3913.0>, [{registered_name,ns_heart}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcbf420 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_heart">>, <<"y(3) {state,undefined,<0.3915.0>}">>, <<"y(4) ns_heart">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010fcbf458 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,894}]}, {heap_size,6765}, {total_heap_size,53133}, {links,[<0.3887.0>,<0.3915.0>,<0.66.0>]}, {memory,425984}, {message_queue_len,0}, {reductions,1910451}, {trap_exit,true}]}, {<0.3912.0>, [{registered_name,ns_stats_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b80a250 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.52027>,{state,#Fun,ignored},<">>, <<"y(3) ns_stats_event">>,<<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010b80a280 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,912}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[<0.3995.0>,<0.3887.0>]}, {memory,67776}, {message_queue_len,0}, {reductions,452667}, {trap_exit,true}]}, {<0.3911.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d0b26e0 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3910.0>,<0.3890.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.3910.0>, [{registered_name,ns_mail_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, 
<<"0x000000010d0b2df8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_mail_sup},one_for_all,[{child,<0.3911.0>,ns_mail_log,{ns_mail_log">>, <<"y(4) ns_mail_sup">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010d0b2e30 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3887.0>,<0.3911.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,601}, {trap_exit,true}]}, {<0.3909.0>, [{registered_name,buckets_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5c9c78 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,menelaus_event,buckets_events,{state,undefined,[{<0.28372.0>,#Ref<0.0.1.">>, <<"y(3) buckets_events">>,<<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010f5c9ca8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,12}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3915.0>,<0.3960.0>,<0.3887.0>]}, {memory,8960}, {message_queue_len,0}, {reductions,2554}, {trap_exit,true}]}, {<0.3908.0>, [{registered_name,mb_master_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f62ba60 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,menelaus_event,mb_master_events,{state,undefined,[{<0.28372.0>,#Ref<0.0.">>, <<"y(3) mb_master_events">>,<<"y(4) <0.3887.0>">>, <<>>, <<"0x000000010f62ba90 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.3887.0>,<0.3960.0>]}, {memory,11936}, {message_queue_len,0}, {reductions,2557}, {trap_exit,true}]}, {<0.3907.0>, [{registered_name,ns_tick_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe51660 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.52002>,{state,#Fun,ignored},<">>, <<"y(3) ns_tick_event">>,<<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010fe51690 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,82}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.3993.0>,<0.3887.0>]}, {memory,16672}, {message_queue_len,0}, 
{reductions,36499}, {trap_exit,true}]}, {<0.3905.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a93f598 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51343>}">>, <<"y(2) <0.3903.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010a93f5c0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3903.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3903.0>, [{registered_name,vbucket_map_mirror}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fdf4e60 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) work_queue">>,<<"y(3) []">>, <<"y(4) vbucket_map_mirror">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010fdf4e98 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.3887.0>,<0.3905.0>]}, {memory,34328}, {message_queue_len,0}, {reductions,2542}, {trap_exit,false}]}, {<0.3901.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c67ab88 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51268>}">>, <<"y(2) <0.3900.0>">>, <<"y(3) ns_config_events_local">>,<<>>, <<"0x000000010c67abb0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3900.0>,<0.3875.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3900.0>, [{registered_name,ns_config_rep}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ffa28e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_rep">>,<<"y(3) {state}">>, <<"y(4) ns_config_rep">>,<<"y(5) <0.3892.0>">>,<<>>, <<"0x000000010ffa2920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,46368}, {total_heap_size,47355}, {links,[<0.3893.0>,<0.3901.0>,<0.3892.0>]}, 
{memory,379760}, {message_queue_len,0}, {reductions,17066}, {trap_exit,false}]}, {<0.3899.0>, [{registered_name,ns_config_rep_merger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010e087688 (ns_config_rep:merger_loop/0 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c6789b8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) {1352,829701,873561}">>,<<>>, <<"0x000000010c6789d0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3892.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,16}, {trap_exit,false}]}, {<0.3898.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c64d190 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3892.0>,<0.3874.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.3897.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010e07b910 (ns_node_disco_log:'-start_link/0-fun-0-'/0 + 80)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c678280 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3892.0>,<0.3893.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,10}, {trap_exit,false}]}, {<0.3894.0>, [{registered_name,ns_node_disco}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ec3d8e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_node_disco">>, <<"y(3) {state,['ns_1@127.0.0.1'],false,undefined}">>, <<"y(4) ns_node_disco">>,<<"y(5) <0.3892.0>">>,<<>>, <<"0x000000010ec3d920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,632}]}, {heap_size,46368}, {total_heap_size,46745}, {links,[<0.3892.0>,<0.66.0>]}, {memory,374840}, {message_queue_len,0}, {reductions,779094}, {trap_exit,false}]}, {<0.3893.0>, [{registered_name,ns_node_disco_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b70c7c0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.52092>,{state,#Fun>, <<"y(3) ns_node_disco_events">>,<<"y(4) 
<0.3892.0>">>, <<>>, <<"0x000000010b70c7f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[<0.3960.0>,<0.4006.0>,<0.3897.0>,<0.3900.0>,<0.3892.0>]}, {memory,26696}, {message_queue_len,0}, {reductions,3213}, {trap_exit,true}]}, {<0.3892.0>, [{registered_name,ns_node_disco_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c67a420 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_node_disco_sup},rest_for_one,[{child,<0.3900.0>,ns_config_rep,{ns">>, <<"y(4) ns_node_disco_sup">>,<<"y(5) <0.3887.0>">>, <<>>, <<"0x000000010c67a458 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,233}, {total_heap_size,610}, {links, [<0.3897.0>,<0.3899.0>,<0.3900.0>,<0.3898.0>,<0.3893.0>, <0.3894.0>,<0.3887.0>]}, {memory,5960}, {message_queue_len,0}, {reductions,826}, {trap_exit,true}]}, {<0.3891.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c643e60 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51219>}">>, <<"y(2) <0.3889.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010c643e88 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3889.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3890.0>, [{registered_name,ns_log_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e7b38f0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) [{handler,ns_mail_log,false,{state},<0.3911.0>}]">>, <<"y(3) ns_log_events">>,<<"y(4) <0.3887.0>">>,<<>>, <<"0x000000010e7b3920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,46368}, {total_heap_size,46601}, {links,[<0.3887.0>,<0.3911.0>]}, {memory,373688}, {message_queue_len,0}, {reductions,6984}, {trap_exit,true}]}, {<0.3889.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010b0354c0 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 
(invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c650f60 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3887.0>,<0.3891.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,16}, {trap_exit,false}]}, {<0.3888.0>, [{registered_name,ns_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ead18a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_log">>, <<"(3) {state,[{log_entry,{1352,829380,869050},'ns_1@127.0.0.1',ns_cookie_manager,3,[73,1">>, <<"y(4) ns_log">>,<<"y(5) <0.3887.0>">>,<<>>, <<"0x000000010ead18e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,17711}, {total_heap_size,46368}, {links,[<0.66.0>,<0.3887.0>]}, {memory,371824}, {message_queue_len,0}, {reductions,6973}, {trap_exit,true}]}, {<0.3887.0>, [{registered_name,ns_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010d892b18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_server_sup},one_for_one,[{child,<0.4011.0>,samples_loader_tasks,{">>, <<"y(4) ns_server_sup">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x000000010d892b50 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,46368}, {total_heap_size,50549}, {links, [<0.3936.0>,<0.3973.0>,<0.3996.0>,<0.4004.0>,<0.4007.0>, <0.4011.0>,<0.3998.0>,<0.4000.0>,<0.3997.0>,<0.3980.0>, <0.3985.0>,<0.3994.0>,<0.3983.0>,<0.3977.0>,<0.3979.0>, <0.3974.0>,<0.3940.0>,<0.3963.0>,<0.3967.0>,<0.3942.0>, <0.3938.0>,<0.3939.0>,<0.3937.0>,<0.3908.0>,<0.3913.0>, <0.3919.0>,<0.3926.0>,<0.3916.0>,<0.3910.0>,<0.3912.0>, <0.3909.0>,<0.3890.0>,<0.3903.0>,<0.3907.0>,<0.3892.0>, <0.3888.0>,<0.3889.0>,<0.196.0>]}, {memory,406712}, {message_queue_len,0}, {reductions,11847}, {trap_exit,true}]}, {<0.3886.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c63f640 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_process_registry">>, <<"y(3) {state,vbucket_filter_changes_registry,4337732}">>, <<"y(4) <0.3886.0>">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x000000010c63f678 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, 
{total_heap_size,233}, {links,[<0.196.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,28}, {trap_exit,true}]}, {<0.3885.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c642858 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51192>}">>, <<"y(2) <0.3884.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010c642880 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3884.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3884.0>, [{registered_name,cb_config_couch_sync}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b8d18e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) cb_config_couch_sync">>,<<"y(3) {state}">>, <<"y(4) cb_config_couch_sync">>,<<"y(5) <0.3884.0>">>, <<>>, <<"0x000000010b8d1920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,46368}, {total_heap_size,75025}, {links,[<0.3885.0>]}, {memory,601040}, {message_queue_len,0}, {reductions,4017}, {trap_exit,false}]}, {<0.3883.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c606d68 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51189>}">>, <<"y(2) <0.3882.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010c606d90 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3882.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3882.0>, [{registered_name,ns_config_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ac83358 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_log">>, <<"y(3) {state,[{configs,[]}]}">>, <<"y(4) ns_config_log">>,<<"y(5) <0.3873.0>">>,<<>>, <<"0x000000010ac83390 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, 
{minor_gcs,21}]}, {heap_size,17711}, {total_heap_size,35422}, {links,[<0.3873.0>,<0.3883.0>]}, {memory,284256}, {message_queue_len,0}, {reductions,85458}, {trap_exit,false}]}, {<0.3880.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010dfb7980 (ns_pubsub:do_subscribe_link/4 + 392)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c605eb8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"y(1) {ns_pubsub,#Ref<0.0.0.51139>}">>, <<"y(2) <0.3879.0>">>,<<"y(3) ns_config_events">>, <<>>, <<"0x000000010c605ee0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3879.0>,<0.3874.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,21}, {trap_exit,true}]}, {<0.3879.0>, [{registered_name,ns_config_isasl_sync}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fc378e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_isasl_sync">>, <<"(3) {state,[],\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/">>, <<"y(4) ns_config_isasl_sync">>,<<"y(5) <0.3873.0>">>, <<>>, <<"0x000000010fc37920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,46368}, {total_heap_size,47965}, {links,[<0.3873.0>,<0.3880.0>]}, {memory,384600}, {message_queue_len,0}, {reductions,14707}, {trap_exit,false}]}, {<0.3878.0>, [{registered_name,ns_config_remote}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c605750 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_replica">>,<<"y(3) {state}">>, <<"y(4) ns_config_remote">>,<<"y(5) <0.3873.0>">>, <<>>, <<"0x000000010c605788 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3873.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,26}, {trap_exit,false}]}, {<0.3876.0>, [{registered_name,ns_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f624aa0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config">>, <<"(3) {config,{full,\"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-19">>, <<"y(4) ns_config">>,<<"y(5) <0.3873.0>">>,<<>>, <<"0x000000010f624ad8 Return addr 0x000000010963bc08 
()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,93}]}, {heap_size,10946}, {total_heap_size,85971}, {links,[<0.3873.0>]}, {memory,688608}, {message_queue_len,0}, {reductions,310055}, {trap_exit,true}]}, {<0.3875.0>, [{registered_name,ns_config_events_local}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e489eb0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.51268>,{state,#Fun,ignored},<">>, <<"y(3) ns_config_events_local">>, <<"y(4) <0.3873.0>">>,<<>>, <<"0x000000010e489ee0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.3873.0>,<0.3901.0>]}, {memory,8776}, {message_queue_len,0}, {reductions,279}, {trap_exit,true}]}, {<0.3874.0>, [{registered_name,ns_config_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f930298 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.52098>,{state,#Fun,ignored},<">>, <<"y(3) ns_config_events">>,<<"y(4) <0.3873.0>">>, <<>>, <<"0x000000010f9302c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,4181}, {total_heap_size,32838}, {links, [<0.3917.0>,<0.3975.0>,<0.4003.0>,<0.4008.0>,<0.3984.0>, <0.3999.0>,<0.3960.0>,<0.3968.0>,<0.3927.0>,<0.3885.0>, <0.3898.0>,<0.3905.0>,<0.3891.0>,<0.3880.0>,<0.3883.0>, <0.3873.0>]}, {memory,264288}, {message_queue_len,0}, {reductions,74239}, {trap_exit,true}]}, {<0.3873.0>, [{registered_name,ns_config_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c6a30c8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_config_sup},rest_for_one,[{child,<0.3884.0>,cb_config_couch_sync,">>, <<"y(4) ns_config_sup">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x000000010c6a3100 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,17711}, {total_heap_size,21892}, {links, [<0.3876.0>,<0.3879.0>,<0.3882.0>,<0.3878.0>,<0.3874.0>, <0.3875.0>,<0.196.0>]}, {memory,176216}, {message_queue_len,0}, {reductions,3461}, {trap_exit,true}]}, {<0.3801.0>, [{registered_name,mnesia_late_loader}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010ded1e90 (mnesia_late_loader:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099aff48 Return addr 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) {state,<0.3771.0>}">>,<<>>, <<"0x00000001099aff68 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_late_loader">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) [<0.3771.0>]">>,<<>>, <<"0x00000001099aff98 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.3771.0>]}, {memory,16632}, {message_queue_len,0}, {reductions,674}, {trap_exit,false}]}, {<0.3800.0>, [{registered_name,mnesia_controller}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e904708 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_controller">>, <<"(3) {state,<0.3771.0>,true,[],[],{0,nil},[],[],{0,nil},undefined,[],[],{interval,#Ref<">>, <<"y(4) mnesia_controller">>,<<"y(5) <0.3771.0>">>, <<>>, <<"0x000000010e904740 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,610}, {total_heap_size,3194}, {links,[<0.3771.0>,<0.66.0>]}, {memory,26512}, {message_queue_len,0}, {reductions,4603}, {trap_exit,true}]}, {<0.3799.0>, [{registered_name,mnesia_snmp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099a8f58 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_snmp_sup},simple_one_for_one,[{child,undefined,mnesia_snmp_su">>, <<"y(4) mnesia_snmp_sup">>,<<"y(5) <0.3771.0>">>,<<>>, <<"0x00000001099a8f90 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3771.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,59}, {trap_exit,true}]}, {<0.3798.0>, [{registered_name,mnesia_checkpoint_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099b1c60 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_checkpoint_sup},simple_one_for_one,[{child,undefined,mnesia_c">>, <<"y(4) mnesia_checkpoint_sup">>, <<"y(5) <0.3771.0>">>,<<>>, <<"0x00000001099b1c98 Return addr 
0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3771.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,59}, {trap_exit,true}]}, {<0.3796.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5bf930 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,240,{arg,latest_log,undefined,\"/Users/farshid/Libra">>, <<>>, <<"0x000000010f5bf940 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,424}]}, {heap_size,2584}, {total_heap_size,6765}, {links,[<0.68.0>,<0.3772.0>,<0.67.0>,#Port<0.31344>]}, {memory,55080}, {message_queue_len,0}, {reductions,401745}, {trap_exit,true}]}, {<0.3776.0>, [{registered_name,mnesia_tm}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010de9c898 (mnesia_tm:doit_loop/1 + 200)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe437a8 Return addr 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) {state,{0,nil},{0,nil},<0.3771.0>,[],[],[]}">>, <<"y(3) []">>,<<"y(4) []">>,<<"y(5) <0.3771.0>">>, <<"y(6) {0,nil}">>,<<"y(7) {0,nil}">>,<<>>, <<"0x000000010fe437f0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_tm">>,<<"y(2) []">>,<<"y(3) []">>, <<"y(4) [<0.3771.0>]">>,<<>>, <<"0x000000010fe43820 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,424}]}, {heap_size,1597}, {total_heap_size,12543}, {links,[<0.3771.0>]}, {memory,101184}, {message_queue_len,0}, {reductions,1340168}, {trap_exit,true}]}, {<0.3775.0>, [{registered_name,mnesia_recover}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fce4f80 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_recover">>, <<"y(3) {state,<0.3771.0>,undefined,undefined,undefined,0,false,true,[]}">>, <<"y(4) mnesia_recover">>,<<"y(5) <0.3771.0>">>,<<>>, <<"0x000000010fce4fb8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,17711}, {total_heap_size,28657}, {links,[<0.3771.0>,<0.66.0>]}, {memory,230136}, {message_queue_len,0}, {reductions,11374}, {trap_exit,true}]}, {<0.3774.0>, [{registered_name,mnesia_locker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 
0x000000010de4bf90 (mnesia_locker:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe27810 Return addr 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) []">>, <<"y(5) {state,<0.3771.0>}">>,<<>>, <<"0x000000010fe27848 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x000000010de923d8 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_locker">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) [<0.3771.0>]">>,<<>>, <<"0x000000010fe27878 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1127}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.3771.0>]}, {memory,16632}, {message_queue_len,0}, {reductions,597264}, {trap_exit,true}]}, {<0.3773.0>, [{registered_name,mnesia_subscr}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c2c20 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_subscr">>, <<"y(3) {state,<0.3771.0>,4264000}">>, <<"y(4) mnesia_subscr">>,<<"y(5) <0.3771.0>">>,<<>>, <<"0x00000001099c2c58 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3770.0>,<0.3771.0>,<0.3759.0>]}, {memory,2784}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, {<0.3772.0>, [{registered_name,mnesia_monitor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe57a48 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_monitor">>, <<"y(3) {state,<0.3771.0>,[],[],true,[],undefined,[]}">>, <<"y(4) mnesia_monitor">>,<<"y(5) <0.3771.0>">>,<<>>, <<"0x000000010fe57a80 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,57}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.3796.0>,<0.3771.0>]}, {memory,29448}, {message_queue_len,0}, {reductions,11577}, {trap_exit,true}]}, {<0.3771.0>, [{registered_name,mnesia_kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe4e460 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_kernel_sup},one_for_all,[{child,<0.3801.0>,mnesia_late_loader">>, <<"y(4) mnesia_kernel_sup">>,<<"y(5) <0.3769.0>">>, <<>>, <<"0x000000010fe4e498 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.3774.0>,<0.3799.0>,<0.3800.0>,<0.3801.0>,<0.3776.0>, <0.3798.0>,<0.3775.0>,<0.3772.0>,<0.3773.0>,<0.3769.0>]}, {memory,7232}, {message_queue_len,0}, {reductions,551}, {trap_exit,true}]}, {<0.3770.0>, [{registered_name,mnesia_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c24d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) [{handler,mnesia_event,false,{state,[],false,[]},false}]">>, <<"y(3) mnesia_event">>,<<"y(4) <0.3769.0>">>,<<>>, <<"0x00000001099c2500 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.3769.0>,<0.3773.0>]}, {memory,13656}, {message_queue_len,0}, {reductions,421}, {trap_exit,true}]}, {<0.3769.0>, [{registered_name,mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b7151c8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_sup},one_for_all,[{child,<0.3771.0>,mnesia_kernel_sup,{mnesia">>, <<"y(4) mnesia_sup">>,<<"y(5) <0.3768.0>">>,<<>>, <<"0x000000010b715200 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3770.0>,<0.3771.0>,<0.3768.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,195}, {trap_exit,true}]}, {<0.3768.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a758840 Return addr 0x000000010963bc08 ()">>, <<"y(0) {normal,[]}">>,<<"y(1) mnesia_sup">>, <<"y(2) <0.3769.0>">>,<<"y(3) <0.3767.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3767.0>,<0.3769.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,32}, {trap_exit,true}]}, {<0.3767.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a775cd0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.3768.0>,{appl_data,mnesia,[mnesia_dumper_load_regulator,mnesia_event,mne">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x000000010a775cf0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 
(proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.7.0>,<0.3768.0>]}, {memory,13656}, {message_queue_len,0}, {reductions,80}, {trap_exit,true}]}, {<0.3761.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd8b418 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,277,{arg,couch_disk_logger,undefined,\"/Users/farshi">>, <<>>, <<"0x000000010dd8b428 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,2584}, {total_heap_size,6765}, {links,[<0.6.0>,<0.67.0>,<0.68.0>,#Port<0.10591>]}, {memory,55080}, {message_queue_len,0}, {reductions,6146}, {trap_exit,true}]}, {<0.3759.0>, [{registered_name,mb_mnesia}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe405f0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mb_mnesia">>, <<"y(3) {state,['ns_1@127.0.0.1']}">>, <<"y(4) mb_mnesia">>,<<"y(5) <0.3757.0>">>,<<>>, <<"0x000000010fe40628 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[<0.3757.0>,<0.3773.0>]}, {memory,109120}, {message_queue_len,0}, {reductions,58160}, {trap_exit,true}]}, {<0.3758.0>, [{registered_name,mb_mnesia_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b820918 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) mb_mnesia_events">>,<<"y(4) <0.3757.0>">>, <<>>, <<"0x000000010b820948 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3757.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,24}, {trap_exit,true}]}, {<0.3757.0>, [{registered_name,mb_mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c626240 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mb_mnesia_sup},one_for_one,[{child,<0.3759.0>,mb_mnesia,{mb_mnesia,s">>, <<"y(4) mb_mnesia_sup">>,<<"y(5) <0.196.0>">>,<<>>, 
<<"0x000000010c626278 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3758.0>,<0.3759.0>,<0.196.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,151}, {trap_exit,true}]}, {<0.3756.0>, [{registered_name,ns_cluster}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099d9960 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_cluster">>,<<"y(3) {state}">>, <<"y(4) ns_cluster">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x00000001099d9998 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.196.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,26}, {trap_exit,false}]}, {<0.3755.0>, [{registered_name,ns_cookie_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f98b8e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_cookie_manager">>,<<"y(3) {state}">>, <<"y(4) ns_cookie_manager">>,<<"y(5) <0.196.0>">>, <<>>, <<"0x000000010f98b920 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,52}]}, {heap_size,46368}, {total_heap_size,46745}, {links,[<0.196.0>]}, {memory,374800}, {message_queue_len,0}, {reductions,59364}, {trap_exit,false}]}, {<0.3754.0>, [{registered_name,timeout_diag_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b71e078 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) timeout_diag_logger">>, <<"y(3) {state,1352829694862}">>, <<"y(4) timeout_diag_logger">>,<<"y(5) <0.196.0>">>, <<>>, <<"0x000000010b71e0b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.196.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,28}, {trap_exit,false}]}, {<0.3753.0>, [{registered_name,couch_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5e66d8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) 
{error_logger,couch_log}">>, <<"y(4) couch_log">>,<<"y(5) <0.3302.0>">>,<<>>, <<"0x000000010c5e6710 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3302.0>,<0.6.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,34}, {trap_exit,false}]}, {<0.3751.0>, [{registered_name,couch_uuids}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099de110 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_uuids">>, <<"y(3) {sequential,\"80503146a4a4cef15ea9c153f6\",751}">>, <<"y(4) couch_uuids">>,<<"y(5) <0.3719.0>">>,<<>>, <<"0x00000001099de148 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3719.0>]}, {memory,2776}, {message_queue_len,0}, {reductions,105}, {trap_exit,false}]}, {<0.3750.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5a6488 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59413">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f5a64a0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f5a64b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83080}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f5a64e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3749.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fdf8038 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59409">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fdf8050 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fdf8060 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83065}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fdf8090 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, 
[{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3748.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe469c0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59404">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fe469d8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe469e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83036}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fe46a18 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3747.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe32fa0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59410">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fe32fb8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe32fc8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83069}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fe32ff8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3746.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f700cc8 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59412">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f700ce0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f700cf0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83076}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f700d20 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, 
{fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3745.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fce8158 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59403">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fce8170 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fce8180 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,81389}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fce81b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3744.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5b0648 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59400">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f5b0660 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f5b0670 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,81354}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f5b06a0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3743.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5ba818 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59408">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f5ba830 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f5ba840 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83061}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f5ba870 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, 
{heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3742.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fdfe428 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59414">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fdfe440 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fdfe450 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83083}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fdfe480 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3741.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a9397c0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59407">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010a9397d8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010a9397e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83057}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010a939818 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3739.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe4cdb0 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59406">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fe4cdc8 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe4cdd8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83053}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fe4ce08 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, 
{links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3738.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f6e7180 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59402">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f6e7198 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f6e71a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,81383}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f6e71d8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3737.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fdfb230 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59411">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fdfb248 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fdfb258 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83072}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fdfb288 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3736.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f6e3f88 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59401">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010f6e3fa0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010f6e3fb0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,81377}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010f6e3fe0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, 
{message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3735.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe49bb8 Return addr 0x000000010b9cd2a0 (inet_tcp:accept/2 + 40)">>, <<"y(0) 59405">>,<<"y(1) #Port<0.10585>">>,<<>>, <<"0x000000010fe49bd0 Return addr 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(0) []">>,<<>>, <<"0x000000010fe49be0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) {1352,831954,83048}">>, <<"y(1) Catch 0x000000010b9ce028 (mochiweb_acceptor:init/3 + 168)">>, <<"y(2) #Fun">>, <<"y(3) #Port<0.10585>">>,<<"y(4) <0.3734.0>">>,<<>>, <<"0x000000010fe49c10 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,35}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.3734.0>]}, {memory,21584}, {message_queue_len,0}, {reductions,14498}, {trap_exit,false}]}, {<0.3734.0>, [{registered_name,couch_httpd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76bb28 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mochiweb_socket_server">>, <<"(3) {mochiweb_socket_server,8092,#Fun,{local,couch_httpd},20">>, <<"y(4) couch_httpd">>,<<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010a76bb60 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,1597}, {total_heap_size,3194}, {links, [<0.3739.0>,<0.3744.0>,<0.3748.0>,<0.3750.0>,<0.13773.0>, <0.3749.0>,<0.3746.0>,<0.3747.0>,<0.3745.0>,<0.3742.0>, <0.3743.0>,<0.3741.0>,<0.3736.0>,<0.3738.0>,<0.3737.0>, <0.3719.0>,<0.3735.0>,#Port<0.10585>]}, {memory,27144}, {message_queue_len,0}, {reductions,756}, {trap_exit,true}]}, {<0.3733.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a760090 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.50236>}}">>, <<"y(4) <0.3733.0>">>,<<"y(5) <0.3732.0>">>,<<>>, <<"0x000000010a7600c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3732.0>,<0.3711.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3732.0>, [{registered_name,couch_view}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 
0">>,<<>>, <<"0x000000010dc03608 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_view">>, <<"y(3) {server,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb\"}">>, <<"y(4) couch_view">>,<<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010dc03640 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,105}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[<0.3719.0>,<0.3733.0>]}, {memory,67776}, {message_queue_len,0}, {reductions,86461}, {trap_exit,true}]}, {<0.3731.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f318b08 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.50234>}}">>, <<"y(4) <0.3731.0>">>,<<"y(5) <0.3730.0>">>,<<>>, <<"0x000000010f318b40 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3730.0>,<0.3711.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3730.0>, [{registered_name,couch_set_view_ddoc_cache}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f926288 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_set_view_ddoc_cache">>, <<"y(3) {state,1048576,730,<0.3731.0>}">>, <<"y(4) couch_set_view_ddoc_cache">>, <<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010f9262c0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3719.0>,<0.3731.0>]}, {memory,8848}, {message_queue_len,0}, {reductions,2511}, {trap_exit,true}]}, {<0.3728.0>, [{registered_name,couch_query_servers}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b724478 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_query_servers">>, <<"(3) {qserver,4243501,4251695,4255792,4247598,[],{[{<<12 bytes>>,true},{<<7 bytes>>,300">>, <<"y(4) couch_query_servers">>,<<"y(5) <0.3719.0>">>, <<>>, <<"0x000000010b7244b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,1597}, {total_heap_size,2207}, 
{links,[<0.3719.0>]}, {memory,18568}, {message_queue_len,0}, {reductions,2374}, {trap_exit,true}]}, {<0.3727.0>, [{registered_name,couch_index_merger_connection_pool}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c60ea80 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) lhttpc_manager">>, <<"(3) {httpc_man,{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}">>, <<"y(4) couch_index_merger_connection_pool">>, <<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010c60eab8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.3719.0>]}, {memory,3856}, {message_queue_len,0}, {reductions,82}, {trap_exit,false}]}, {<0.3726.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f2e5000 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.50223>}}">>, <<"y(4) <0.3726.0>">>,<<"y(5) <0.3725.0>">>,<<>>, <<"0x000000010f2e5038 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3725.0>,<0.3711.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3725.0>, [{registered_name,couch_spatial}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f869ef8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_spatial">>, <<"(3) {spatial,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb\",0,">>, <<"y(4) couch_spatial">>,<<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010f869f30 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,125}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<0.3719.0>,<0.3726.0>]}, {memory,42224}, {message_queue_len,0}, {reductions,104322}, {trap_exit,true}]}, {<0.3724.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5e37f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.50214>}}">>, <<"y(4) <0.3724.0>">>,<<"y(5) <0.3723.0>">>,<<>>, 
<<"0x000000010c5e3830 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3723.0>,<0.3711.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3723.0>, [{registered_name,couch_set_view}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f60f480 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_set_view">>, <<"(3) {server,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb\",<0.">>, <<"y(4) couch_set_view">>,<<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010f60f4b8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,987}, {total_heap_size,5168}, {links,[<0.3724.0>,<0.3719.0>]}, {memory,42296}, {message_queue_len,0}, {reductions,10111}, {trap_exit,true}]}, {<0.3722.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5e11a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_event_sup">>, <<"y(3) {couch_db_update,{couch_db_update_notifier,#Ref<0.0.0.50210>}}">>, <<"y(4) <0.3722.0>">>,<<"y(5) <0.3721.0>">>,<<>>, <<"0x000000010c5e11e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3721.0>,<0.3711.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,32}, {trap_exit,false}]}, {<0.3721.0>, [{registered_name,couch_auth_cache}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c623c00 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_auth_cache">>, <<"y(3) {state,50,0,<0.3722.0>}">>, <<"y(4) couch_auth_cache">>,<<"y(5) <0.3719.0>">>, <<>>, <<"0x000000010c623c38 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,610}, {total_heap_size,1597}, {links,[<0.3719.0>,<0.3722.0>]}, {memory,13800}, {message_queue_len,0}, {reductions,579}, {trap_exit,true}]}, {<0.3720.0>, [{registered_name,couch_db_update_notifier_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, 
<<"0x000000010c5dfe78 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,couch_db_update_notifier_sup},one_for_one,[],undefined,10,3600,[],co">>, <<"y(4) couch_db_update_notifier_sup">>, <<"y(5) <0.3719.0>">>,<<>>, <<"0x000000010c5dfeb0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3719.0>]}, {memory,2776}, {message_queue_len,0}, {reductions,312}, {trap_exit,true}]}, {<0.3719.0>, [{registered_name,couch_secondary_services}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b71c600 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,couch_secondary_services},one_for_one,[{child,<0.3751.0>,uuids,{couc">>, <<"y(4) couch_secondary_services">>, <<"y(5) <0.3301.0>">>,<<>>, <<"0x000000010b71c638 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,12}]}, {heap_size,2584}, {total_heap_size,5168}, {links, [<0.3723.0>,<0.3730.0>,<0.3734.0>,<0.3751.0>,<0.3732.0>, <0.3727.0>,<0.3728.0>,<0.3725.0>,<0.3720.0>,<0.3721.0>, <0.3301.0>]}, {memory,42584}, {message_queue_len,0}, {reductions,13286}, {trap_exit,true}]}, {<0.3718.0>, [{registered_name,couch_replica_index_barrier}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5defc8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_index_barrier">>, <<"(3) {state,[],2,{[],[]},{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],">>, <<"y(4) couch_replica_index_barrier">>, <<"y(5) <0.3302.0>">>,<<>>, <<"0x000000010c5df000 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3302.0>]}, {memory,5864}, {message_queue_len,0}, {reductions,85}, {trap_exit,false}]}, {<0.3717.0>, [{registered_name,couch_main_index_barrier}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5df720 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_index_barrier">>, <<"(3) {state,[],4,{[],[]},{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],">>, <<"y(4) couch_main_index_barrier">>, <<"y(5) <0.3302.0>">>,<<>>, <<"0x000000010c5df758 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3302.0>]}, {memory,2848}, {message_queue_len,0}, {reductions,81}, {trap_exit,false}]}, {<0.3713.0>, [{registered_name,couch_rep_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000110240d68 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,couch_rep_sup},one_for_one,[],undefined,3,10,[],couch_rep_sup,[]}">>, <<"y(4) couch_rep_sup">>,<<"y(5) <0.3302.0>">>,<<>>, <<"0x0000000110240da0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3302.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.3712.0>, [{registered_name,couch_replication}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000011022d148 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) couch_replication">>,<<"y(4) <0.3302.0>">>, <<>>, <<"0x000000011022d178 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3302.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,20}, {trap_exit,true}]}, {<0.3711.0>, [{registered_name,couch_db_update}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f56ebc0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,couch_db_update_notifier,#Ref<0.0.0.51970>,#Fun>, <<"y(3) couch_db_update">>,<<"y(4) <0.3302.0>">>,<<>>, <<"0x000000010f56ebf0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,626}]}, {heap_size,987}, {total_heap_size,3571}, {links, [<0.3726.0>,<0.3733.0>,<0.3982.0>,<0.3731.0>,<0.3722.0>, <0.3724.0>,<0.3302.0>]}, {memory,29648}, {message_queue_len,0}, {reductions,83432}, {trap_exit,true}]}, {<0.3319.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5e30a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_ref_counter">>, <<"(3) 
{srv,{dict,3,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],">>, <<"y(4) <0.3319.0>">>,<<"y(5) <0.3319.0>">>,<<>>, <<"0x000000010c5e30d8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.3314.0>]}, {memory,8952}, {message_queue_len,0}, {reductions,167}, {trap_exit,false}]}, {<0.3318.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f317090 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_db_updater">>, <<"(3) {db,<0.3317.0>,<0.3318.0>,nil,<<16 bytes>>,<0.3314.0>,<0.3319.0>,{db_header,11,1,<">>, <<"y(4) <0.3318.0>">>,<<"y(5) <0.3317.0>">>,<<>>, <<"0x000000010f3170c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.3317.0>]}, {memory,6944}, {message_queue_len,0}, {reductions,488}, {trap_exit,true}]}, {<0.3317.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f3183b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_db">>, <<"(3) {db,<0.3317.0>,<0.3318.0>,nil,<<16 bytes>>,<0.3314.0>,<0.3319.0>,{db_header,11,1,<">>, <<"y(4) <0.3317.0>">>,<<"y(5) <0.3307.0>">>,<<>>, <<"0x000000010f3183e8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3307.0>,<0.3318.0>]}, {memory,8848}, {message_queue_len,0}, {reductions,141}, {trap_exit,true}]}, {<0.3316.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010c58e088 (couch_file:writer_loop/4 + 232)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f314d18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) 10">>,<<"y(1) 4175">>, <<"(2) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch">>, <<"y(3) []">>,<<>>, <<"0x000000010f314d40 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.3314.0>]}, {memory,6944}, {message_queue_len,0}, {reductions,605}, {trap_exit,true}]}, {<0.3315.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010c58f4d8 (couch_file:reader_loop/3 + 216)">>, <<"CP: 
0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c6228f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) 10">>, <<"(1) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch">>, <<"y(2) []">>,<<>>, <<"0x000000010c622918 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3314.0>]}, {memory,8736}, {message_queue_len,0}, {reductions,1619}, {trap_exit,true}]}, {<0.3314.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a0fd58 Return addr 0x000000010c5897a0 (couch_file:init/1 + 1008)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_file">>, <<"y(3) {file,<0.3315.0>,<0.3316.0>,4175}">>, <<"y(4) <0.3314.0>">>,<<"y(5) <0.3307.0>">>,<<>>, <<"0x0000000109a0fd90 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"(3) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch">>, <<"y(4) Catch 0x000000010c5897c0 (couch_file:init/1 + 1040)">>, <<>>, <<"0x0000000109a0fdc0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3316.0>,<0.3319.0>,<0.3315.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,99}, {trap_exit,true}]}, {<0.3313.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c462a80 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_ref_counter">>, <<"(3) {srv,{dict,2,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[],">>, <<"y(4) <0.3313.0>">>,<<"y(5) <0.3313.0>">>,<<>>, <<"0x000000010c462ab8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,610}, {total_heap_size,1220}, {links,[<0.3308.0>]}, {memory,10744}, {message_queue_len,0}, {reductions,367}, {trap_exit,false}]}, {<0.3312.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f2e1258 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_db_updater">>, <<"(3) {db,<0.3311.0>,<0.3312.0>,nil,<<16 bytes>>,<0.3308.0>,<0.3313.0>,{db_header,11,1,<">>, <<"y(4) <0.3312.0>">>,<<"y(5) <0.3311.0>">>,<<>>, <<"0x000000010f2e1290 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.3311.0>]}, {memory,6944}, {message_queue_len,0}, {reductions,490}, {trap_exit,true}]}, {<0.3311.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c461760 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_db">>, <<"(3) {db,<0.3311.0>,<0.3312.0>,nil,<<16 bytes>>,<0.3308.0>,<0.3313.0>,{db_header,11,1,<">>, <<"y(4) <0.3311.0>">>,<<"y(5) <0.3307.0>">>,<<>>, <<"0x000000010c461798 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3307.0>,<0.3312.0>]}, {memory,8848}, {message_queue_len,0}, {reductions,218}, {trap_exit,true}]}, {<0.3310.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010c58e088 (couch_file:writer_loop/4 + 232)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f38b2d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) 10">>,<<"y(1) 4175">>, <<"(2) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.">>, <<"y(3) []">>,<<>>, <<"0x000000010f38b2f8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3308.0>]}, {memory,5792}, {message_queue_len,0}, {reductions,610}, {trap_exit,true}]}, {<0.3309.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010c58f4d8 (couch_file:reader_loop/3 + 216)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b6fc9b8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) 10">>, <<"(1) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.">>, <<"y(2) []">>,<<>>, <<"0x000000010b6fc9d8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.3308.0>]}, {memory,8736}, {message_queue_len,0}, {reductions,3127}, {trap_exit,true}]}, {<0.3308.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b815fa8 Return addr 0x000000010c5897a0 (couch_file:init/1 + 1008)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_file">>, <<"y(3) {file,<0.3309.0>,<0.3310.0>,4175}">>, <<"y(4) <0.3308.0>">>,<<"y(5) <0.3307.0>">>,<<>>, <<"0x000000010b815fe0 Return addr 0x0000000109c348b0 
(proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"(3) \"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.">>, <<"y(4) Catch 0x000000010c5897c0 (couch_file:init/1 + 1040)">>, <<>>, <<"0x000000010b816010 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.3310.0>,<0.3313.0>,<0.3309.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,121}, {trap_exit,true}]}, {<0.3307.0>, [{registered_name,couch_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e8e2938 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_server">>, <<"(3) {server,\"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb\",{re">>, <<"y(4) couch_server">>,<<"y(5) <0.3302.0>">>,<<>>, <<"0x000000010e8e2970 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,676}]}, {heap_size,4181}, {total_heap_size,15127}, {links,[<0.3311.0>,<0.3317.0>,<0.3302.0>]}, {memory,121936}, {message_queue_len,0}, {reductions,1182366}, {trap_exit,true}]}, {<0.3306.0>, [{registered_name,couch_file_write_guard}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dd79710 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_file_write_guard">>,<<"y(3) true">>, <<"y(4) couch_file_write_guard">>, <<"y(5) <0.3302.0>">>,<<>>, <<"0x000000010dd79748 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,40}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.3302.0>]}, {memory,29552}, {message_queue_len,0}, {reductions,3313}, {trap_exit,false}]}, {<0.3305.0>, [{registered_name,couch_task_status}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f596610 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_task_status">>,<<"y(3) nil">>, <<"y(4) couch_task_status">>,<<"y(5) <0.3302.0>">>, <<>>, <<"0x000000010f596648 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,66}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.3302.0>]}, {memory,24672}, {message_queue_len,0}, {reductions,53374}, {trap_exit,false}]}, {<0.3304.0>, 
[{registered_name,couch_task_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a0b808 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) couch_task_events">>,<<"y(4) <0.3302.0>">>, <<>>, <<"0x0000000109a0b838 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.3302.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,28}, {trap_exit,true}]}, {<0.3303.0>, [{registered_name,couch_drv}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a12c70 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_drv">>,<<"y(3) nil">>, <<"y(4) couch_drv">>,<<"y(5) <0.3302.0>">>,<<>>, <<"0x0000000109a12ca8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.3302.0>]}, {memory,8736}, {message_queue_len,0}, {reductions,76}, {trap_exit,false}]}, {<0.3302.0>, [{registered_name,couch_primary_services}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c5eb7a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,couch_primary_services},one_for_one,[{child,<0.3718.0>,couch_replica">>, <<"y(4) couch_primary_services">>, <<"y(5) <0.3301.0>">>,<<>>, <<"0x000000010c5eb7e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,61}]}, {heap_size,2584}, {total_heap_size,9349}, {links, [<0.3712.0>,<0.3717.0>,<0.3718.0>,<0.3753.0>,<0.3713.0>, <0.3305.0>,<0.3307.0>,<0.3711.0>,<0.3306.0>,<0.3303.0>, <0.3304.0>,<0.3301.0>]}, {memory,76072}, {message_queue_len,0}, {reductions,91221}, {trap_exit,true}]}, {<0.3301.0>, [{registered_name,couch_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a0b0a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,couch_server_sup},one_for_all,[{child,<0.3719.0>,couch_secondary_ser">>, <<"y(4) couch_server_sup">>,<<"y(5) <0.197.0>">>,<<>>, <<"0x0000000109a0b0e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,2584}, {total_heap_size,2584}, {links,[<0.3300.0>,<0.3302.0>,<0.3719.0>,<0.197.0>]}, {memory,21704}, {message_queue_len,0}, {reductions,172}, {trap_exit,true}]}, {<0.3300.0>, [{registered_name,couch_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000011016ec18 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) couch_config">>, <<"(3) {config,[{<0.6.0>,#Fun},{<0.3301.0>,#Fun>, <<"y(4) couch_config">>,<<"y(5) <0.197.0>">>,<<>>, <<"0x000000011016ec50 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,17}]}, {heap_size,10946}, {total_heap_size,57314}, {links,[<0.3301.0>]}, {memory,460216}, {message_queue_len,0}, {reductions,54396}, {trap_exit,false}]}, {<0.338.0>, [{registered_name,dets}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fceb370 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) dets_server">>, <<"y(3) {state,86087,[<0.27.0>],[]}">>, <<"y(4) dets">>,<<"y(5) <0.27.0>">>,<<>>, <<"0x000000010fceb3a8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,11}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.27.0>]}, {memory,21512}, {message_queue_len,0}, {reductions,2935}, {trap_exit,true}]}, {<0.337.0>, [{registered_name,dets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b714a70 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,dets_sup},simple_one_for_one,[{child,undefined,dets,{dets,istart_lin">>, <<"y(4) dets_sup">>,<<"y(5) <0.27.0>">>,<<>>, <<"0x000000010b714aa8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.27.0>]}, {memory,24528}, {message_queue_len,0}, {reductions,1838}, {trap_exit,true}]}, {<0.310.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{net_kernel,ticker,2}}, {backtrace, [<<"Program counter: 0x000000010aa8b470 (net_kernel:ticker_loop/2 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099b23e8 Return addr 0x000000010963bc08 ()">>, <<"y(0) 15000">>,<<"y(1) <0.308.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, 
{heap_size,233}, {total_heap_size,233}, {links,[<0.308.0>]}, {memory,2600}, {message_queue_len,0}, {reductions,345}, {trap_exit,false}]}, {<0.309.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,accept_loop,2}}, {backtrace, [<<"Program counter: 0x0000000109bec488 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099b1508 Return addr 0x000000010b9cd180 (inet_tcp:accept/1 + 40)">>, <<"y(0) 17">>,<<"y(1) #Port<0.5803>">>,<<>>, <<"0x00000001099b1520 Return addr 0x000000010ddab7b8 (inet_tcp_dist:accept_loop/2 + 96)">>, <<"y(0) []">>,<<>>, <<"0x00000001099b1530 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) #Port<0.5803>">>, <<"y(2) <0.308.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.308.0>]}, {memory,2672}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<0.308.0>, [{registered_name,net_kernel}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcfadd0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) net_kernel">>, <<"(3) {state,'ns_1@127.0.0.1','ns_1@127.0.0.1',longnames,{tick,<0.310.0>,15000},7000,sys">>, <<"y(4) net_kernel">>,<<"y(5) <0.305.0>">>,<<>>, <<"0x000000010fcfae08 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,9}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.305.0>,<0.309.0>,<0.310.0>,#Port<0.5803>]}, {memory,11912}, {message_queue_len,0}, {reductions,1991}, {trap_exit,true}]}, {<0.307.0>, [{registered_name,auth}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c43b7d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>,<<"y(2) auth">>, <<"y(3) {state,bptrojzpwfmfrqou,61496}">>, <<"y(4) auth">>,<<"y(5) <0.305.0>">>,<<>>, <<"0x000000010c43b808 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.305.0>]}, {memory,8736}, {message_queue_len,0}, {reductions,288}, {trap_exit,true}]}, {<0.306.0>, [{registered_name,erl_epmd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099a3730 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) erl_epmd">>, <<"y(3) {state,#Port<0.5805>,21100,ns_1}">>, <<"y(4) erl_epmd">>,<<"y(5) <0.305.0>">>,<<>>, <<"0x00000001099a3768 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.305.0>,#Port<0.5805>]}, {memory,2744}, {message_queue_len,0}, {reductions,127}, {trap_exit,false}]}, {<0.305.0>, [{registered_name,net_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099cdd58 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,net_sup},one_for_all,[{child,<0.308.0>,net_kernel,{net_kernel,start_">>, <<"y(4) net_sup">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00000001099cdd90 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.306.0>,<0.307.0>,<0.308.0>,<0.11.0>]}, {memory,8856}, {message_queue_len,0}, {reductions,249}, {trap_exit,true}]}, {<0.304.0>, [{registered_name,inet_gethost_native}, {status,waiting}, {initial_call,{inet_gethost_native,server_init,2}}, {backtrace, [<<"Program counter: 0x000000010dda4510 (inet_gethost_native:main_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f79b518 Return addr 0x000000010963bc08 ()">>, <<"y(0) {state,#Port<0.5789>,8000,53302,57399,<0.303.0>,4,{statistics,0,0,0,0,0,0,0,0}}">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,383}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.303.0>,#Port<0.5789>]}, {memory,24568}, {message_queue_len,0}, {reductions,26570}, {trap_exit,true}]}, {<0.303.0>, [{registered_name,inet_gethost_native_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b724ea8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,inet_gethost_native,<0.304.0>,<0.304.0>,{local,inet_gethost_native_sup}}">>, <<"y(4) inet_gethost_native_sup">>, <<"y(5) <0.27.0>">>,<<>>, <<"0x000000010b724ee0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.27.0>,<0.304.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.302.0>, [{registered_name,dist_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f0b368 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) dist_manager">>, <<"y(3) {state,true,\"127.0.0.1\"}">>, <<"y(4) dist_manager">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x0000000109f0b3a0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 
(proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,9}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.196.0>]}, {memory,21512}, {message_queue_len,0}, {reductions,4220}, {trap_exit,false}]}, {<0.233.0>, [{registered_name,mochiweb_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a7630f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,mochiweb_sup},one_for_one,[],undefined,10,10,[],mochiweb_sup,[]}">>, <<"y(4) mochiweb_sup">>,<<"y(5) <0.232.0>">>,<<>>, <<"0x000000010a763130 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.232.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.232.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a7629c0 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) mochiweb_app">>, <<"y(2) <0.233.0>">>,<<"y(3) <0.231.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.231.0>,<0.233.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,49}, {trap_exit,true}]}, {<0.231.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75ded8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.232.0>,{appl_data,mochiweb,[],undefined,{mochiweb_app,[]},[mochihex,moch">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x000000010a75def8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.232.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.229.0>, [{registered_name,ssl_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b6f20a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ssl_server">>, <<"(3) {st,#Port<0.4875>,27503,false,[],[],63482,0,\"OpenSSL 0.9.8r 8 Feb 2011\",\"OpenSSL 0">>, <<"y(4) ssl_server">>,<<"y(5) <0.219.0>">>,<<>>, <<"0x000000010b6f20e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.219.0>,#Port<0.4875>]}, {memory,29448}, {message_queue_len,0}, {reductions,1347}, {trap_exit,true}]}, {<0.227.0>, [{registered_name,lhttpc_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c61b8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) lhttpc_manager">>, <<"(3) {httpc_man,{dict,0,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]}">>, <<"y(4) lhttpc_manager">>,<<"y(5) <0.226.0>">>,<<>>, <<"0x00000001099c61f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.226.0>]}, {memory,3856}, {message_queue_len,0}, {reductions,106}, {trap_exit,false}]}, {<0.226.0>, [{registered_name,lhttpc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75d010 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,lhttpc_sup},one_for_one,[{child,<0.227.0>,lhttpc_manager,{lhttpc_man">>, <<"y(4) lhttpc_sup">>,<<"y(5) <0.225.0>">>,<<>>, <<"0x000000010a75d048 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.225.0>,<0.227.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,121}, {trap_exit,true}]}, {<0.225.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75d788 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) lhttpc">>, <<"y(2) <0.226.0>">>,<<"y(3) <0.224.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.224.0>,<0.226.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,49}, {trap_exit,true}]}, {<0.224.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75c178 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.225.0>,{appl_data,lhttpc,[lhttpc_manager],undefined,{lhttpc,nil},[],[],i">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x000000010a75c198 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.225.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.222.0>, [{registered_name,ssl_connection_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75c8b8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ssl_connection_sup},simple_one_for_one,[{child,undefined,undefined,{">>, <<"y(4) ssl_connection_sup">>,<<"y(5) <0.219.0>">>, <<>>, <<"0x000000010a75c8f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.219.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,52}, {trap_exit,true}]}, {<0.221.0>, [{registered_name,ssl_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76f760 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ssl_manager">>, <<"(3) {state,32794,ssl_session_cache,8640,[20503,24600,28697],#Ref<0.0.0.451>,undefined}">>, <<"y(4) ssl_manager">>,<<"y(5) <0.219.0>">>,<<>>, <<"0x000000010a76f798 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.219.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,54}, {trap_exit,true}]}, {<0.220.0>, [{registered_name,ssl_broker_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76f008 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ssl_broker_sup},simple_one_for_one,[{child,undefined,ssl_broker,{ssl">>, <<"y(4) ssl_broker_sup">>,<<"y(5) <0.219.0>">>,<<>>, <<"0x000000010a76f040 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.219.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,52}, {trap_exit,true}]}, {<0.219.0>, [{registered_name,ssl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a75e618 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) 
{state,{local,ssl_sup},one_for_all,[{child,<0.229.0>,ssl_server,{ssl_server,start_">>, <<"y(4) ssl_sup">>,<<"y(5) <0.218.0>">>,<<>>, <<"0x000000010a75e650 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.220.0>,<0.222.0>,<0.229.0>,<0.221.0>,<0.218.0>]}, {memory,5880}, {message_queue_len,0}, {reductions,311}, {trap_exit,true}]}, {<0.218.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099dc6b8 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) ssl_app">>, <<"y(2) <0.219.0>">>,<<"y(3) <0.217.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.217.0>,<0.219.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,49}, {trap_exit,true}]}, {<0.217.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a759b38 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.218.0>,{appl_data,ssl,[ssl_sup,ssl_server,ssl_broker_sup],undefined,{ssl">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x000000010a759b58 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.218.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.214.0>, [{registered_name,tftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76e8b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,tftp_sup},one_for_one,[],undefined,10,3600,[],tftp_sup,[[]]}">>, <<"y(4) tftp_sup">>,<<"y(5) <0.207.0>">>,<<>>, <<"0x000000010a76e8e8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.207.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.213.0>, [{registered_name,httpd_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099dfaa0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) 
{state,{local,httpd_sup},one_for_one,[],undefined,10,3600,[],httpd_sup,[[]]}">>, <<"y(4) httpd_sup">>,<<"y(5) <0.207.0>">>,<<>>, <<"0x00000001099dfad8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.207.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,43}, {trap_exit,true}]}, {<0.212.0>, [{registered_name,httpc_handler_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76c6d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_handler_sup},simple_one_for_one,[{child,undefined,undefined,{h">>, <<"y(4) httpc_handler_sup">>,<<"y(5) <0.209.0>">>, <<>>, <<"0x000000010a76c708 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.209.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,52}, {trap_exit,true}]}, {<0.211.0>, [{registered_name,httpc_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76d580 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) httpc_manager">>, <<"(3) {state,[],httpc_manager__handler_db,{cookie_db,undefined,16406},httpc_manager__ses">>, <<"y(4) httpc_manager">>,<<"y(5) <0.210.0>">>,<<>>, <<"0x000000010a76d5b8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.210.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,78}, {trap_exit,true}]}, {<0.210.0>, [{registered_name,httpc_profile_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a76ce28 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_profile_sup},one_for_one,[{child,<0.211.0>,httpc_manager,{http">>, <<"y(4) httpc_profile_sup">>,<<"y(5) <0.209.0>">>, <<>>, <<"0x000000010a76ce60 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.209.0>,<0.211.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,122}, {trap_exit,true}]}, {<0.209.0>, [{registered_name,httpc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, 
{backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099df348 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_sup},one_for_one,[{child,<0.212.0>,httpc_handler_sup,{httpc_ha">>, <<"y(4) httpc_sup">>,<<"y(5) <0.207.0>">>,<<>>, <<"0x00000001099df380 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.210.0>,<0.212.0>,<0.207.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,171}, {trap_exit,true}]}, {<0.208.0>, [{registered_name,ftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a77cc40 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ftp_sup},simple_one_for_one,[{child,undefined,undefined,{ftp,start_l">>, <<"y(4) ftp_sup">>,<<"y(5) <0.207.0>">>,<<>>, <<"0x000000010a77cc78 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.207.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,52}, {trap_exit,true}]}, {<0.207.0>, [{registered_name,inets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099db368 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,inets_sup},one_for_one,[{child,<0.214.0>,tftp_sup,{tftp_sup,start_li">>, <<"y(4) inets_sup">>,<<"y(5) <0.206.0>">>,<<>>, <<"0x00000001099db3a0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.208.0>,<0.213.0>,<0.214.0>,<0.209.0>,<0.206.0>]}, {memory,5880}, {message_queue_len,0}, {reductions,315}, {trap_exit,true}]}, {<0.206.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a7717b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) inets_app">>, <<"y(2) <0.207.0>">>,<<"y(3) <0.205.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.205.0>,<0.207.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.205.0>, [{registered_name,[]}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099dbf58 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.206.0>,{appl_data,inets,[inets_sup,httpc_manager],undefined,{inets_app,[">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099dbf78 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.7.0>,<0.206.0>]}, {memory,3896}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.202.0>, [{registered_name,crypto_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a771038 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) crypto_server">>,<<"y(3) []">>, <<"y(4) crypto_server">>,<<"y(5) <0.201.0>">>,<<>>, <<"0x000000010a771070 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.201.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,26}, {trap_exit,false}]}, {<0.201.0>, [{registered_name,crypto_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099debf0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,crypto_sup},one_for_all,[{child,<0.202.0>,crypto_server,{crypto_serv">>, <<"y(4) crypto_sup">>,<<"y(5) <0.200.0>">>,<<>>, <<"0x00000001099dec28 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.200.0>,<0.202.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,102}, {trap_exit,true}]}, {<0.200.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a77c508 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) crypto_app">>, <<"y(2) <0.201.0>">>,<<"y(3) <0.199.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.199.0>,<0.201.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,49}, {trap_exit,true}]}, {<0.199.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 
64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c4748 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.200.0>,{appl_data,crypto,[crypto_sup,crypto_server],undefined,{crypto_ap">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099c4768 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.200.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.197.0>, [{registered_name,cb_couch_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001101595f8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,cb_couch_sup},one_for_one,[{child,<0.3301.0>,couch_app,{couch_app,st">>, <<"y(4) cb_couch_sup">>,<<"y(5) <0.196.0>">>,<<>>, <<"0x0000000110159630 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,10946}, {total_heap_size,21892}, {links,[<0.196.0>,<0.3301.0>]}, {memory,176016}, {message_queue_len,0}, {reductions,8664}, {trap_exit,true}]}, {<0.196.0>, [{registered_name,ns_server_cluster_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010c4353d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_server_cluster_sup},one_for_one,[{child,<0.3887.0>,ns_server_sup,">>, <<"y(4) ns_server_cluster_sup">>,<<"y(5) <0.63.0>">>, <<>>, <<"0x000000010c435408 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,4181}, {total_heap_size,32838}, {links, [<0.3754.0>,<0.3873.0>,<0.3886.0>,<0.3887.0>,<0.3756.0>, <0.3757.0>,<0.3755.0>,<0.197.0>,<0.302.0>,<0.63.0>]}, {memory,263904}, {message_queue_len,0}, {reductions,203315}, {trap_exit,true}]}, {<0.171.0>, [{registered_name,'sink-stderr'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f8efc88 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_stderr_sink">>,<<"y(3) {state}">>, <<"y(4) 'sink-stderr'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010f8efcc0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,509}]}, {heap_size,28657}, 
{total_heap_size,35422}, {links,[<0.35.0>]}, {memory,284216}, {message_queue_len,0}, {reductions,237009}, {trap_exit,false}]}, {<0.95.0>, [{registered_name,'sink-ns_log'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f87fd58 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_log_sink">>,<<"y(3) {state}">>, <<"y(4) 'sink-ns_log'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010f87fd90 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,10946}, {total_heap_size,11556}, {links,[<0.35.0>]}, {memory,93288}, {message_queue_len,0}, {reductions,952}, {trap_exit,false}]}, {<0.93.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e50db90 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,69,{arg,'sink-disk_stats',2,\"/Users/farshid/Library">>, <<>>, <<"0x000000010e50dba0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,15}]}, {heap_size,987}, {total_heap_size,1974}, {links,[<0.67.0>,<0.92.0>,<0.68.0>,#Port<0.3388>]}, {memory,16752}, {message_queue_len,0}, {reductions,11281}, {trap_exit,true}]}, {<0.92.0>, [{registered_name,'sink-disk_stats'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f13e870 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_stats',\"/Users/farshid/Library/Application Support/Couchbase/var">>, <<"y(4) 'sink-disk_stats'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010f13e8a8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,16}]}, {heap_size,75025}, {total_heap_size,75402}, {links,[<0.35.0>,<0.93.0>]}, {memory,604096}, {message_queue_len,0}, {reductions,58439}, {trap_exit,true}]}, {<0.90.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f13658 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,0,{arg,'sink-disk_xdcr_errors',2,\"/Users/farshid/Li">>, <<>>, <<"0x0000000109f13668 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, 
{fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.67.0>,<0.89.0>,<0.68.0>,#Port<0.3383>]}, {memory,34408}, {message_queue_len,0}, {reductions,1771}, {trap_exit,true}]}, {<0.89.0>, [{registered_name,'sink-disk_xdcr_errors'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109efa6c0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_xdcr_errors',\"/Users/farshid/Library/Application Support/Couchba">>, <<"y(4) 'sink-disk_xdcr_errors'">>, <<"y(5) <0.35.0>">>,<<>>, <<"0x0000000109efa6f8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.35.0>,<0.90.0>]}, {memory,6912}, {message_queue_len,0}, {reductions,295}, {trap_exit,true}]}, {<0.87.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109efc5d0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,10,{arg,'sink-disk_xdcr',2,\"/Users/farshid/Library/">>, <<>>, <<"0x0000000109efc5e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,987}, {total_heap_size,1974}, {links,[<0.67.0>,<0.86.0>,<0.68.0>,#Port<0.3378>]}, {memory,16752}, {message_queue_len,0}, {reductions,2484}, {trap_exit,true}]}, {<0.86.0>, [{registered_name,'sink-disk_xdcr'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b721280 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_xdcr',\"/Users/farshid/Library/Application Support/Couchbase/var/">>, <<"y(4) 'sink-disk_xdcr'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010b7212b8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.35.0>,<0.87.0>]}, {memory,16672}, {message_queue_len,0}, {reductions,1108}, {trap_exit,true}]}, {<0.84.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a930948 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,3657,{arg,'sink-disk_debug',2,\"/Users/farshid/Libra">>, <<>>, <<"0x000000010a930958 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 
0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,987}, {total_heap_size,5168}, {links,[<0.67.0>,<0.83.0>,<0.68.0>,#Port<0.3373>]}, {memory,42304}, {message_queue_len,0}, {reductions,101371}, {trap_exit,true}]}, {<0.83.0>, [{registered_name,'sink-disk_debug'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f8b7cf0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_debug',\"/Users/farshid/Library/Application Support/Couchbase/var">>, <<"y(4) 'sink-disk_debug'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010f8b7d28 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,507}]}, {heap_size,28657}, {total_heap_size,35422}, {links,[<0.35.0>,<0.84.0>]}, {memory,284256}, {message_queue_len,0}, {reductions,372929}, {trap_exit,true}]}, {<0.81.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f08198 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,0,{arg,'sink-disk_couchdb',2,\"/Users/farshid/Librar">>, <<>>, <<"0x0000000109f081a8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.67.0>,<0.80.0>,<0.68.0>,#Port<0.3368>]}, {memory,34408}, {message_queue_len,0}, {reductions,1745}, {trap_exit,true}]}, {<0.80.0>, [{registered_name,'sink-disk_couchdb'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ac985a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_couchdb',\"/Users/farshid/Library/Application Support/Couchbase/v">>, <<"y(4) 'sink-disk_couchdb'">>,<<"y(5) <0.35.0>">>, <<>>, <<"0x000000010ac985d8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.35.0>,<0.81.0>]}, {memory,8776}, {message_queue_len,0}, {reductions,300}, {trap_exit,true}]}, {<0.78.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a77bdb8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) 
{state,[],[],<0.67.0>,<0.68.0>,0,{arg,'sink-disk_mapreduce_errors',2,\"/Users/farsh">>, <<>>, <<"0x000000010a77bdc8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.67.0>,<0.77.0>,<0.68.0>,#Port<0.3363>]}, {memory,34408}, {message_queue_len,0}, {reductions,1789}, {trap_exit,true}]}, {<0.77.0>, [{registered_name,'sink-disk_mapreduce_errors'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ac9ee10 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_mapreduce_errors',\"/Users/farshid/Library/Application Support/Co">>, <<"y(4) 'sink-disk_mapreduce_errors'">>, <<"y(5) <0.35.0>">>,<<>>, <<"0x000000010ac9ee48 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.35.0>,<0.78.0>]}, {memory,6912}, {message_queue_len,0}, {reductions,307}, {trap_exit,true}]}, {<0.75.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010dcd2fb0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,396,{arg,'sink-disk_views',2,\"/Users/farshid/Librar">>, <<>>, <<"0x000000010dcd2fc0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,4181}, {total_heap_size,10946}, {links,[<0.67.0>,<0.74.0>,<0.68.0>,#Port<0.3358>]}, {memory,88528}, {message_queue_len,0}, {reductions,12120}, {trap_exit,true}]}, {<0.74.0>, [{registered_name,'sink-disk_views'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ee266c8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_views',\"/Users/farshid/Library/Application Support/Couchbase/var">>, <<"y(4) 'sink-disk_views'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010ee26700 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,102}]}, {heap_size,6765}, {total_heap_size,7142}, {links,[<0.35.0>,<0.75.0>]}, {memory,58016}, {message_queue_len,0}, {reductions,20517}, {trap_exit,true}]}, {<0.72.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 
(disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e58bcf8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,304,{arg,'sink-disk_error',2,\"/Users/farshid/Librar">>, <<>>, <<"0x000000010e58bd08 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,17}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.67.0>,<0.71.0>,<0.68.0>,#Port<0.3353>]}, {memory,21632}, {message_queue_len,0}, {reductions,11774}, {trap_exit,true}]}, {<0.71.0>, [{registered_name,'sink-disk_error'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe0d678 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_error',\"/Users/farshid/Library/Application Support/Couchbase/var">>, <<"y(4) 'sink-disk_error'">>,<<"y(5) <0.35.0>">>,<<>>, <<"0x000000010fe0d6b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,85}]}, {heap_size,4181}, {total_heap_size,4558}, {links,[<0.35.0>,<0.72.0>]}, {memory,37344}, {message_queue_len,0}, {reductions,87752}, {trap_exit,true}]}, {<0.69.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010aff75c8 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f5ab5a0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.67.0>,<0.68.0>,1673,{arg,'sink-disk_default',2,\"/Users/farshid/Lib">>, <<>>, <<"0x000000010f5ab5b0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,65}]}, {heap_size,2584}, {total_heap_size,6765}, {links,[<0.65.0>,<0.67.0>,<0.68.0>,#Port<0.3348>]}, {memory,55080}, {message_queue_len,0}, {reductions,52743}, {trap_exit,true}]}, {<0.68.0>, [{registered_name,disk_log_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e907900 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) disk_log_server">>,<<"y(3) {state,[]}">>, <<"y(4) disk_log_server">>,<<"y(5) <0.27.0>">>,<<>>, <<"0x000000010e907938 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,104}]}, {heap_size,1597}, {total_heap_size,3194}, {links, [<0.87.0>,<0.93.0>,<0.3796.0>,<0.3761.0>,<0.90.0>,<0.75.0>, <0.81.0>,<0.84.0>,<0.78.0>,<0.69.0>,<0.72.0>,<0.27.0>]}, {memory,27072}, 
{message_queue_len,0}, {reductions,21359}, {trap_exit,true}]}, {<0.67.0>, [{registered_name,disk_log_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcb20a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,disk_log_sup},simple_one_for_one,[{child,undefined,disk_log,{disk_lo">>, <<"y(4) disk_log_sup">>,<<"y(5) <0.27.0>">>,<<>>, <<"0x000000010fcb20e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,54}]}, {heap_size,2584}, {total_heap_size,2961}, {links, [<0.87.0>,<0.93.0>,<0.3796.0>,<0.3761.0>,<0.90.0>,<0.75.0>, <0.81.0>,<0.84.0>,<0.78.0>,<0.69.0>,<0.72.0>,<0.27.0>]}, {memory,24968}, {message_queue_len,0}, {reductions,16920}, {trap_exit,true}]}, {<0.66.0>, [{registered_name,timer_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe13a68 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) 86">>,<<"y(2) timer">>, <<"y(3) []">>,<<"y(4) timer_server">>, <<"y(5) <0.27.0>">>,<<>>, <<"0x000000010fe13aa0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,14788}]}, {heap_size,1597}, {total_heap_size,1974}, {links, [<0.3926.0>,<0.3961.0>,<0.3994.0>,<0.12927.0>,<0.3934.0>, <0.3929.0>,<0.3888.0>,<0.3913.0>,<0.3916.0>,<0.3894.0>, <0.3775.0>,<0.3800.0>,<0.27.0>]}, {memory,17184}, {message_queue_len,0}, {reductions,8481107}, {trap_exit,true}]}, {<0.65.0>, [{registered_name,'sink-disk_default'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f769868 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ale_disk_sink">>, <<"(3) {state,'sink-disk_default',\"/Users/farshid/Library/Application Support/Couchbase/v">>, <<"y(4) 'sink-disk_default'">>,<<"y(5) <0.35.0>">>, <<>>, <<"0x000000010f7698a0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,265}]}, {heap_size,17711}, {total_heap_size,35422}, {links,[<0.35.0>,<0.69.0>]}, {memory,284256}, {message_queue_len,0}, {reductions,227973}, {trap_exit,true}]}, {<0.63.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a9bb5c8 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) ns_server">>, <<"y(2) <0.196.0>">>,<<"y(3) <0.62.0>">>,<<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[<0.62.0>,<0.196.0>]}, {memory,109016}, {message_queue_len,0}, {reductions,27848}, {trap_exit,true}]}, {<0.62.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099fc108 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.63.0>,{appl_data,ns_server,[ns_server_sup,ns_config,ns_config_sup,ns_con">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099fc128 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.7.0>,<0.63.0>]}, {memory,13656}, {message_queue_len,0}, {reductions,104}, {trap_exit,true}]}, {<0.60.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010afa8ad0 (cpu_sup:measurement_server_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109ef17f8 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) []">>,<<"y(5) []">>, <<"y(6) []">>,<<"y(7) []">>, <<"y(8) {internal,not_used,[],{unix,darwin}}">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[]}, {memory,2560}, {message_queue_len,0}, {reductions,7}, {trap_exit,true}]}, {<0.59.0>, [{registered_name,cpu_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a9c14e8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) cpu_sup">>, <<"y(3) {state,<0.60.0>,{unix,darwin}}">>, <<"y(4) cpu_sup">>,<<"y(5) <0.53.0>">>,<<>>, <<"0x000000010a9c1520 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.53.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,33}, {trap_exit,true}]}, {<0.58.0>, [{registered_name,os_cmd_port_creator}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010a8564b0 (os:start_port_srv_loop/0 + 24)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fcf0bd8 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,76}]}, {heap_size,233}, {total_heap_size,610}, {links,[]}, {memory,5576}, {message_queue_len,0}, {reductions,11601}, {trap_exit,true}]}, {<0.55.0>, [{registered_name,memsup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 
0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f629b70 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) memsup">>, <<"(3) {state,{unix,darwin},false,{4114120000,4168124000},{<0.6.0>,1801744},false,60000,3">>, <<"y(4) memsup">>,<<"y(5) <0.53.0>">>,<<>>, <<"0x000000010f629ba8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,30}]}, {heap_size,2584}, {total_heap_size,5168}, {links,[<0.53.0>]}, {memory,42184}, {message_queue_len,0}, {reductions,174866}, {trap_exit,true}]}, {<0.54.0>, [{registered_name,disksup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a93e258 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) disksup">>, <<"y(3) {state,80,60000,{unix,darwin},[{\"7529936\",487546976,7}],#Port<0.3180>}">>, <<"y(4) disksup">>,<<"y(5) <0.53.0>">>,<<>>, <<"0x000000010a93e290 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,52}]}, {heap_size,1597}, {total_heap_size,3194}, {links,[<0.53.0>,#Port<0.3180>]}, {memory,26432}, {message_queue_len,0}, {reductions,29629}, {trap_exit,true}]}, {<0.53.0>, [{registered_name,os_mon_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f29968 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,os_mon_sup},one_for_one,[{child,<0.59.0>,cpu_sup,{cpu_sup,start_link">>, <<"y(4) os_mon_sup">>,<<"y(5) <0.52.0>">>,<<>>, <<"0x0000000109f299a0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.54.0>,<0.55.0>,<0.59.0>,<0.52.0>]}, {memory,5840}, {message_queue_len,0}, {reductions,279}, {trap_exit,true}]}, {<0.52.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c5600 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) os_mon">>, <<"y(2) <0.53.0>">>,<<"y(3) <0.51.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.51.0>,<0.53.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<0.51.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 
0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099c4ea0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.52.0>,{appl_data,os_mon,[os_mon_sup,os_mon_sysinfo,disksup,memsup,cpu_su">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099c4ec0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.52.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.48.0>, [{registered_name,release_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099f7010 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) release_handler">>, <<"(3) {state,[],\"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-r">>, <<"y(4) release_handler">>,<<"y(5) <0.44.0>">>,<<>>, <<"0x00000001099f7048 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.44.0>]}, {memory,29408}, {message_queue_len,0}, {reductions,3089}, {trap_exit,false}]}, {<0.47.0>, [{registered_name,overload}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099f0a78 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) overload">>, <<"y(3) {state,0,0,8.000000e-01,1399,1.000000e-01,{0,0},clear}">>, <<"y(4) overload">>,<<"y(5) <0.45.0>">>,<<>>, <<"0x00000001099f0ab0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.45.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,39}, {trap_exit,false}]}, {<0.46.0>, [{registered_name,alarm_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f29218 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) [{handler,alarm_handler,false,[{system_memory_high_watermark,[]}],false}]">>, <<"y(3) alarm_handler">>,<<"y(4) <0.45.0>">>,<<>>, <<"0x0000000109f29248 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.45.0>]}, {memory,2704}, {message_queue_len,0}, 
{reductions,43}, {trap_exit,true}]}, {<0.45.0>, [{registered_name,sasl_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f2a0c0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,sasl_safe_sup},one_for_one,[{child,<0.47.0>,overload,{overload,start">>, <<"y(4) sasl_safe_sup">>,<<"y(5) <0.44.0>">>,<<>>, <<"0x0000000109f2a0f8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.46.0>,<0.47.0>,<0.44.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,170}, {trap_exit,true}]}, {<0.44.0>, [{registered_name,sasl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099f0320 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,sasl_sup},one_for_one,[{child,<0.48.0>,release_handler,{release_hand">>, <<"y(4) sasl_sup">>,<<"y(5) <0.43.0>">>,<<>>, <<"0x00000001099f0358 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.45.0>,<0.48.0>,<0.43.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<0.43.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f28ad8 Return addr 0x000000010963bc08 ()">>, <<"y(0) {state,tty,{undefined,undefined,undefined}}">>, <<"y(1) sasl">>,<<"y(2) <0.44.0>">>, <<"y(3) <0.42.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.42.0>,<0.44.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,101}, {trap_exit,true}]}, {<0.42.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099efbe0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.43.0>,{appl_data,sasl,[sasl_sup,alarm_handler,overload,release_handler],">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099efc00 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.43.0>]}, {memory,2744}, {message_queue_len,0}, 
{reductions,23}, {trap_exit,true}]}, {<0.38.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010ae43e18 (io:wait_io_mon_reply/2 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a9c0da8 Return addr 0x000000010ae431d0 (io:parse_erl_exprs/3 + 144)">>, <<"y(0) #Ref<0.0.0.34>">>,<<"y(1) <0.24.0>">>,<<>>, <<"0x000000010a9c0dc0 Return addr 0x000000010ab207b0 (shell:'-get_command/5-fun-0-'/1 + 40)">>, <<"y(0) []">>,<<>>, <<"0x000000010a9c0dd0 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.25.0>]}, {memory,2672}, {message_queue_len,0}, {reductions,14}, {trap_exit,false}]}, {<0.37.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010ab13e98 (shell:eval_loop/3 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109ef1f68 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) 8207">>,<<"y(4) []">>,<<"y(5) <0.25.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.25.0>]}, {memory,2600}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<0.36.0>, [{registered_name,ale}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fe24648 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>,<<"y(2) ale">>, <<"(3) {state,{dict,11,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{">>, <<"y(4) ale">>,<<"y(5) <0.34.0>">>,<<>>, <<"0x000000010fe24680 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,96}]}, {heap_size,6765}, {total_heap_size,24476}, {links,[<0.34.0>,<0.6.0>]}, {memory,196688}, {message_queue_len,0}, {reductions,626249}, {trap_exit,true}]}, {<0.35.0>, [{registered_name,ale_dynamic_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f18700 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ale_dynamic_sup},one_for_one,[{child,<0.171.0>,'sink-stderr',{ale_st">>, <<"y(4) ale_dynamic_sup">>,<<"y(5) <0.34.0>">>,<<>>, <<"0x0000000109f18738 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,2584}, {total_heap_size,3571}, {links, [<0.86.0>,<0.92.0>,<0.95.0>,<0.171.0>,<0.89.0>,<0.74.0>, <0.80.0>,<0.83.0>,<0.77.0>,<0.65.0>,<0.71.0>,<0.34.0>]}, {memory,29848}, {message_queue_len,0}, {reductions,1042}, {trap_exit,true}]}, 
{<0.34.0>, [{registered_name,ale_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010aca7f38 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ale_sup},one_for_all,[{child,<0.36.0>,ale,{ale,start_link,[]},perman">>, <<"y(4) ale_sup">>,<<"y(5) <0.33.0>">>,<<>>, <<"0x000000010aca7f70 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.35.0>,<0.36.0>,<0.33.0>]}, {memory,5800}, {message_queue_len,0}, {reductions,166}, {trap_exit,true}]}, {<0.33.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f28380 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) ale_app">>, <<"y(2) <0.34.0>">>,<<"y(3) <0.32.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.32.0>,<0.34.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,49}, {trap_exit,true}]}, {<0.32.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099e5748 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.33.0>,{appl_data,ale,[],undefined,{ale_app,[]},[ale,ale_app,ale_codegen,">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00000001099e5768 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.33.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.27.0>, [{registered_name,kernel_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010a904500 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,kernel_safe_sup},one_for_one,[{child,<0.338.0>,dets,{dets_server,sta">>, <<"y(4) kernel_safe_sup">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x000000010a904538 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,610}, {total_heap_size,987}, {links, [<0.68.0>,<0.337.0>,<0.338.0>,<0.303.0>,<0.66.0>,<0.67.0>, <0.11.0>]}, {memory,8976}, {message_queue_len,0}, {reductions,465}, 
{trap_exit,true}]}, {<0.26.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a02e90 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) kernel_config">>,<<"y(3) []">>, <<"y(4) <0.26.0>">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x0000000109a02ec8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,268}, {trap_exit,true}]}, {<0.25.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010ab0eb50 (shell:get_command1/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099ed9c0 Return addr 0x000000010ab0e048 (shell:server_loop/7 + 272)">>, <<"y(0) []">>,<<"y(1) 8207">>,<<"y(2) []">>, <<"y(3) <0.37.0>">>,<<"y(4) <0.38.0>">>,<<>>, <<"0x00000001099ed9f0 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) 1">>,<<"y(5) 20">>, <<"y(6) 20">>,<<"y(7) 8207">>,<<"y(8) 0">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,1597}, {total_heap_size,5778}, {links,[<0.37.0>,<0.38.0>,<0.24.0>]}, {memory,47144}, {message_queue_len,0}, {reductions,6363}, {trap_exit,true}]}, {<0.24.0>, [{registered_name,user}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010aaf4260 (user:get_chars/8 + 280)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f569590 Return addr 0x000000010aaef0e0 (user:do_io_request/5 + 88)">>, <<"y(0) []">>,<<"y(1) []">>,<<"y(2) []">>, <<"y(3) unicode">>,<<"y(4) start">>, <<"y(5) {[],[]}">>,<<"y(6) #Port<0.384>">>, <<"y(7) {erl_scan,tokens,[1]}">>,<<"y(8) get_until">>, <<"y(9) io_lib">>,<<"y(10) [\"1\",62,32]">>,<<>>, <<"0x000000010f5695f0 Return addr 0x000000010aaeef50 (user:server_loop/2 + 1408)">>, <<"y(0) #Port<0.384>">>,<<"y(1) <0.24.0>">>, <<"y(2) <0.38.0>">>,<<>>, <<"0x000000010f569610 Return addr 0x000000010aaee5b0 (user:catch_loop/3 + 112)">>, <<"y(0) #Port<0.384>">>,<<>>, <<"0x000000010f569620 Return addr 0x000000010963bc08 ()">>, <<"y(0) <0.25.0>">>,<<"y(1) #Port<0.384>">>, <<"y(2) Catch 0x000000010aaee5b0 (user:catch_loop/3 + 112)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,10946}, {total_heap_size,57314}, {links,[<0.22.0>,<0.25.0>,#Port<0.384>,<0.6.0>]}, {memory,459544}, {message_queue_len,0}, {reductions,3613951}, {trap_exit,true}]}, {<0.22.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f23fd0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,user_sup,<0.24.0>,<0.24.0>,{<0.22.0>,user_sup}}">>, 
<<"y(4) <0.22.0>">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x0000000109f24008 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,4181}, {total_heap_size,4181}, {links,[<0.11.0>,<0.24.0>]}, {memory,34328}, {message_queue_len,0}, {reductions,325}, {trap_exit,true}]}, {<0.21.0>, [{registered_name,standard_error}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010aa7b6e8 (standard_error:server_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010fd9d740 Return addr 0x000000010963bc08 ()">>, <<"y(0) #Port<0.370>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,179}]}, {heap_size,28657}, {total_heap_size,150050}, {links,[<0.20.0>,#Port<0.370>]}, {memory,1201280}, {message_queue_len,0}, {reductions,9904126}, {trap_exit,true}]}, {<0.20.0>, [{registered_name,standard_error_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f274b0 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,standard_error,<0.21.0>,<0.21.0>,{local,standard_error_sup}}">>, <<"y(4) standard_error_sup">>,<<"y(5) <0.11.0>">>, <<>>, <<"0x0000000109f274e8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>,<0.21.0>]}, {memory,2744}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.19.0>, [{registered_name,code_server}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010aaaa2c8 (code_server:loop/1 + 128)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000111909388 Return addr 0x000000010963bc08 ()">>, <<"(0) {state,<0.11.0>,\"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-">>, <<"y(1) <0.11.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,404}]}, {heap_size,10946}, {total_heap_size,85971}, {links,[<0.11.0>]}, {memory,688504}, {message_queue_len,0}, {reductions,832281}, {trap_exit,true}]}, {<0.18.0>, [{registered_name,file_server_2}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010b8139a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) file_server">>,<<"y(3) #Port<0.54>">>, <<"y(4) file_server_2">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x000000010b8139e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, 
{minor_gcs,23463}]}, {heap_size,1597}, {total_heap_size,12543}, {links,[#Port<0.54>,<0.11.0>]}, {memory,101224}, {message_queue_len,0}, {reductions,16327570}, {trap_exit,true}]}, {<0.17.0>, [{registered_name,global_group}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f255a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) global_group">>, <<"y(3) {state,no_conf,true,[],[],[],[],[],'nonode@nohost',[],normal,normal}">>, <<"y(4) global_group">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x0000000109f255e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,2704}, {message_queue_len,0}, {reductions,67}, {trap_exit,true}]}, {<0.16.0>, [{registered_name,inet_db}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f24e50 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) inet_db">>, <<"(3) {state,inet_db,inet_cache,inet_hosts_byname,inet_hosts_byaddr,inet_hosts_file_byna">>, <<"y(4) inet_db">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x0000000109f24e88 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.11.0>]}, {memory,5720}, {message_queue_len,0}, {reductions,251}, {trap_exit,true}]}, {<0.15.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010a871030 (global:loop_the_registrar/0 + 24)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010cf9d988 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.13.0>]}, {memory,24424}, {message_queue_len,0}, {reductions,1411}, {trap_exit,false}]}, {<0.14.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010a869f38 (global:loop_the_locker/1 + 768)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a041e8 Return addr 0x000000010a869bf8 (global:init_the_locker/1 + 328)">>, <<"y(0) {multi,[],[],[],'nonode@nohost',false,false}">>, <<"y(1) infinity">>,<<>>, <<"0x0000000109a04200 Return addr 0x000000010963bc08 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.13.0>]}, {memory,2600}, {message_queue_len,0}, {reductions,20}, {trap_exit,true}]}, {<0.13.0>, [{registered_name,global_name_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program 
counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f79cf68 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) global">>, <<"y(3) {state,true,[],[],[],[],'ns_1@127.0.0.1',<0.14.0>,<0.15.0>,no_trace,false}">>, <<"y(4) global_name_server">>,<<"y(5) <0.11.0>">>, <<>>, <<"0x000000010f79cfa0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,21}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.14.0>,<0.15.0>,<0.11.0>]}, {memory,9032}, {message_queue_len,0}, {reductions,5723}, {trap_exit,true}]}, {<0.12.0>, [{registered_name,rex}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109f0d9a8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>,<<"y(2) rpc">>, <<"y(3) {1,{<0.28830.0>,{<0.28829.0>,{#Ref<0.0.1.229818>,'ns_1@127.0.0.1'}},nil,nil}}">>, <<"y(4) rex">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x0000000109f0d9e0 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.11.0>]}, {memory,5864}, {message_queue_len,0}, {reductions,138}, {trap_exit,true}]}, {<0.11.0>, [{registered_name,kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099ea800 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,kernel_sup},one_for_all,[{child,<0.305.0>,net_sup_dynamic,{erl_distr">>, <<"y(4) kernel_sup">>,<<"y(5) <0.10.0>">>,<<>>, <<"0x00000001099ea838 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,5168}, {links, [<0.20.0>,<0.26.0>,<0.27.0>,<0.305.0>,<0.22.0>,<0.16.0>, <0.18.0>,<0.19.0>,<0.17.0>,<0.12.0>,<0.13.0>,<0.10.0>]}, {memory,42624}, {message_queue_len,0}, {reductions,1735}, {trap_exit,true}]}, {<0.10.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x000000010a7cd670 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x00000001099f1f60 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) kernel">>, <<"y(2) <0.11.0>">>,<<"y(3) <0.9.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.9.0>,<0.11.0>]}, {memory,2640}, {message_queue_len,0}, {reductions,69}, {trap_exit,true}]}, {<0.9.0>, [{registered_name,[]}, 
{status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000010a7cb3a0 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x0000000109a01ff8 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.10.0>,{appl_data,kernel,[application_controller,erl_reply,auth,boot_serv">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x0000000109a02018 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.7.0>,<0.10.0>]}, {memory,6912}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.7.0>, [{registered_name,application_controller}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x000000010a7d3f38 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010e5aeaf0 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) application_controller">>, <<"(3) {state,[],[],[],[{mnesia,<0.3767.0>},{mapreduce,undefined},{couch_index_merger,und">>, <<"y(4) application_controller">>,<<"y(5) <0.2.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,17711}, {total_heap_size,139104}, {links, [<0.199.0>,<0.231.0>,<0.3767.0>,<0.217.0>,<0.224.0>,<0.205.0>, <0.42.0>,<0.51.0>,<0.62.0>,<0.9.0>,<0.32.0>,<0.0.0>]}, {memory,1114112}, {message_queue_len,0}, {reductions,118461}, {trap_exit,true}]}, {<0.6.0>, [{registered_name,error_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x0000000109c21c00 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f738f78 Return addr 0x0000000109c348b0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,couch_log,false,{state,2,true},<0.3753.0>},{handler,sasl_report_tty_h,fa">>, <<"y(3) error_logger">>,<<"y(4) <0.2.0>">>,<<>>, <<"0x000000010f738fa8 Return addr 0x000000010963bc08 ()">>, <<"y(0) Catch 0x0000000109c348d0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,86}]}, {heap_size,28657}, {total_heap_size,225075}, {links,[<0.24.0>,<0.3753.0>,<0.3761.0>,<0.36.0>,<0.0.0>]}, {memory,1801744}, {message_queue_len,0}, {reductions,4411542}, {trap_exit,true}]}, {<0.3.0>, [{registered_name,erl_prim_loader}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x0000000109bd8378 (erl_prim_loader:loop/3 + 176)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010f746300 Return addr 0x000000010963bc08 ()">>, <<"y(0) []">>, <<"(1) [\"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchb">>, <<"y(2) <0.2.0>">>, <<"(3) {state,efile,[],none,#Port<0.1>,infinity,undefined,true,{prim_state,false,undefine">>, <<"y(4) infinity">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1447}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[#Port<0.1>,<0.0.0>]}, 
{memory,109016}, {message_queue_len,0}, {reductions,5557138}, {trap_exit,true}]}, {<0.0.0>, [{registered_name,init}, {status,waiting}, {initial_call,{otp_ring0,start,2}}, {backtrace, [<<"Program counter: 0x0000000109981b98 (init:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>,<<"arity = 0">>,<<>>, <<"0x000000010ac068e8 Return addr 0x000000010963bc08 ()">>, <<"(0) {state,[{'-root',[<<139 bytes>>]},{'-progname',[<<3 bytes>>]},{'-home',[<<14 bytes">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,795}]}, {heap_size,6765}, {total_heap_size,13530}, {links,[<0.6.0>,<0.7.0>,<0.3.0>]}, {memory,109056}, {message_queue_len,0}, {reductions,268889}, {trap_exit,true}]}]}

nodes_info = [{struct, [{systemStats, {struct, [{cpu_utilization_rate,12.158808933002481}, {swap_total,6442450944}, {swap_used,5988909056}]}}, {interestingStats,{struct,[]}}, {uptime,<<"2578">>}, {memoryTotal,4168124000}, {memoryFree,54004000}, {mcdMemoryReserved,3180}, {mcdMemoryAllocated,3180}, {couchApiBase,<<"http://127.0.0.1:8092/">>}, {otpNode,<<"ns_1@127.0.0.1">>}, {otpCookie,<<"bptrojzpwfmfrqou">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {thisNode,true}, {hostname,<<"127.0.0.1:8091">>}, {clusterCompatibility,131072}, {version,<<"2.0.0-1949-rel-community">>}, {os,<<"i386-apple-darwin11.4.0">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}]

buckets = []

logs:
-------------------------------
2012-11-13 09:56:20.869 ns_cookie_manager:3:info:cookie update(ns_1@127.0.0.1) - Initial otp cookie generated: bptrojzpwfmfrqou
2012-11-13 09:56:21.477 mb_master:0:info:message(ns_1@127.0.0.1) - I'm the only node, so I'm the master.
2012-11-13 09:56:21.755 ns_orchestrator:0:warning:message(ns_1@127.0.0.1) - Changed cluster compat mode from undefined to [2,0]
2012-11-13 09:56:22.127 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
2012-11-13 09:56:35.974 mb_master:0:info:message(ns_1@127.0.0.1) - I'm the only node, so I'm the master.
2012-11-13 09:56:35.984 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
2012-11-13 10:00:17.457 menelaus_web:12:info:message(ns_1@127.0.0.1) - Created bucket "default" of type: membase [{num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}]
2012-11-13 10:00:18.533 ns_memcached:1:info:message(ns_1@127.0.0.1) - Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
2012-11-13 10:01:24.323 ns_memcached:2:info:message(ns_1@127.0.0.1) - Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown
2012-11-13 10:01:29.602 mb_master:0:info:message(ns_1@127.0.0.1) - I'm the only node, so I'm the master.
2012-11-13 10:01:29.620 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
2012-11-13 10:01:31.184 ns_memcached:1:info:message(ns_1@127.0.0.1) - Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
2012-11-13 10:01:38.582 ns_memcached:2:info:message(ns_1@127.0.0.1) - Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown
2012-11-13 10:01:41.889 mb_master:0:info:message(ns_1@127.0.0.1) - I'm the only node, so I'm the master.
2012-11-13 10:01:41.897 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
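The Created bucket "default" entry above is the result of a REST POST against the admin port. A minimal sketch of an equivalent request, not taken from this dump: the host is the 127.0.0.1:8091 listener shown in these logs, the credentials match the Basic auth header logged further down, the 202 status is an assumption, and the parameter names follow the classic /pools/default/buckets API while the values mirror the logged bucket properties.

    # Sketch only: assumed host, credentials and status code.
    import base64
    import urllib.parse
    import urllib.request

    params = urllib.parse.urlencode({
        "name": "default",
        "ramQuotaMB": 2391,      # {ram_quota,2507145216} bytes / 1048576
        "replicaNumber": 1,      # {num_replicas,1}
        "authType": "sasl",      # {auth_type,sasl}
        "saslPassword": "",
        "flushEnabled": 0,       # {flush_enabled,false}
    }).encode()

    req = urllib.request.Request("http://127.0.0.1:8091/pools/default/buckets",
                                 data=params, method="POST")
    req.add_header("Content-Type", "application/x-www-form-urlencoded")
    req.add_header("Authorization",
                   "Basic " + base64.b64encode(b"Administrator:password").decode())

    with urllib.request.urlopen(req) as resp:
        print(resp.status)       # expect 202 Accepted while the bucket warms up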
2012-11-13 10:01:43.433 ns_memcached:1:info:message(ns_1@127.0.0.1) - Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
2012-11-13 10:23:04.205 menelaus_web:2:info:message(ns_1@127.0.0.1) - Invalid delete received: {mochiweb_request,#Port<0.12920>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"]
2012-11-13 10:26:53.260 menelaus_web:2:info:message(ns_1@127.0.0.1) - Invalid delete received: {mochiweb_request,#Port<0.13316>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"]
2012-11-13 10:26:58.156 menelaus_web:2:info:message(ns_1@127.0.0.1) - Invalid delete received: {mochiweb_request,#Port<0.13324>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"]
2012-11-13 10:27:34.333 ns_memcached:2:info:message(ns_1@127.0.0.1) - Shutting down bucket "default" on 'ns_1@127.0.0.1' for deletion
2012-11-13 10:27:34.743 menelaus_web:11:info:message(ns_1@127.0.0.1) - Deleted bucket "default"
2012-11-13 10:35:18.154 ns_memcached:1:info:message(ns_1@127.0.0.1) - Bucket "beer-sample" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
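The three Invalid delete received entries above are DELETE requests against /pools/default/buckets/ with no bucket name, so menelaus parses the path as ["pools","default","buckets"] and rejects it; the successful deletion at 10:27:34 used a concrete bucket path. A sketch of the accepted form, reusing the Basic credentials carried by the rejected requests ("Administrator:password" base64-encoded); the expected 200 is an assumption.

    # Sketch of the DELETE that menelaus accepts: the bucket name must be
    # part of the path, i.e. /pools/default/buckets/<name>.
    import base64
    import urllib.request

    req = urllib.request.Request(
        "http://localhost:8091/pools/default/buckets/default",  # name included
        method="DELETE")
    req.add_header("Authorization",
                   "Basic " + base64.b64encode(b"Administrator:password").decode())

    with urllib.request.urlopen(req) as resp:
        print(resp.status)   # expect 200 once the bucket has shut down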
2012-11-13 10:36:10.435 menelaus_web:102:warning:client-side error report(ns_1@127.0.0.1) - Client-side error-report for user "Administrator" on node 'ns_1@127.0.0.1': User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11 Got unhandled error: Uncaught TypeError: Cannot call method 'concat' of undefined At: http://127.0.0.1:8091/js/analytics.js:186 Backtrace: Function: collectBacktraceViaCaller Args: --------- Function: appOnError Args: "Uncaught TypeError: Cannot call method 'concat' of undefined" "http://127.0.0.1:8091/js/analytics.js" 186 ---------
2012-11-13 10:36:43.365 ns_memcached:2:info:message(ns_1@127.0.0.1) - Shutting down bucket "beer-sample" on 'ns_1@127.0.0.1' for deletion
2012-11-13 10:36:43.907 menelaus_web:11:info:message(ns_1@127.0.0.1) - Deleted bucket "beer-sample"
2012-11-13 10:37:13.442 samples_loader_tasks:0:critical:message(ns_1@127.0.0.1) - Loading sample bucket beer-sample failed: {failed_to_load_samples_with_status, 1}

logs_node (debug):
-------------------------------
[ns_server:info,2012-11-13T9:56:16.201,nonode@nohost:<0.63.0>:ns_server:init_logging:225]Started & configured logging
[ns_server:info,2012-11-13T9:56:16.221,nonode@nohost:<0.63.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin"}, {path_config_etcdir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase"}, {path_config_libdir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib"}, {path_config_datadir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/tmp"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}]
[error_logger:info,2012-11-13T9:56:16.418,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.202.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:16.419,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost
[error_logger:info,2012-11-13T9:56:16.432,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost
[error_logger:info,2012-11-13T9:56:16.477,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started:
[{pid,<0.208.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.506,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.211.0>}, {name,httpc_manager}, {mfargs, {httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.507,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.210.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.511,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.212.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.512,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.209.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.517,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.213.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.534,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.214.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.535,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.535,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.569,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.220.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.581,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.221.0>}, 
{name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.584,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.222.0>}, {name,ssl_connection}, {mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.585,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.690,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.229.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.691,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.227.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.692,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.702,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.729,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.744,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.752,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.757,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.860,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.238.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.238.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.182,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.241.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.183,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.242.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.191,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.243.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.205,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.244.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.228,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.245.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.229,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.246.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.231,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.247.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.232,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.248.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.235,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.249.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.245,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.252.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.246,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.253.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.247,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.240.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.258,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.255.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.558,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.256.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.669,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.267.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.685,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.270.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, 
{child_type,worker}] [error_logger:info,2012-11-13T9:56:17.687,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.272.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.710,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.273.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.713,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.275.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.728,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.277.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.849,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.279.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.850,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.296.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.851,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.254.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.853,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.239.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.854,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.197.0>}, 
{name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [error_logger:error,2012-11-13T9:56:17.916,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.249.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.917,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.298.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:17.995,nonode@nohost:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{9,56,17}}}, {memory, [{total,18081768}, {processes,5843992}, {processes_used,5837960}, {system,12237776}, {atom,853497}, {atom_used,849643}, {binary,92200}, {code,8180155}, {ets,1203608}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, mochiweb_acceptor,inet_tcp,gen_tcp,mochiweb_socket, mochiweb_socket_server,mochilists,mochiweb_http,eval_bits, couch_httpd,couch_view,couch_set_view_ddoc_cache, couch_query_servers,couch_spatial,mapreduce, couch_set_view,snappy,couch_compress, couch_spatial_validation,couch_set_view_mapreduce,ejson, couch_doc,couch_db_update_notifier,couch_btree, couch_ref_counter,couch_uuids,couch_db_updater,couch_db, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,couch_index_barrier,couch_event_sup, couch_log,couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, 
v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,155}, {node,nonode@nohost}, {nodes,[]}, {registered, [kernel_safe_sup,inet_db,rex,kernel_sup,couch_view, global_name_server,couch_auth_cache,couch_uuids,sasl_sup, file_server_2,couch_task_status,ale,lhttpc_manager, couch_server_sup,lhttpc_sup,couch_server,global_group, os_cmd_port_creator,couch_set_view_ddoc_cache, couch_index_merger_connection_pool,erl_prim_loader, tftp_sup,couch_spatial,'sink-ns_log',couch_rep_sup, 'sink-disk_stats','sink-disk_xdcr_errors',release_handler, standard_error_sup,couch_query_servers,'sink-disk_xdcr', httpc_sup,'sink-disk_debug','sink-disk_couchdb',overload, 'sink-disk_mapreduce_errors',error_logger,disk_log_sup, 'sink-stderr',httpc_profile_sup,'sink-disk_views', disk_log_server,httpc_manager,'sink-disk_error', alarm_handler,httpc_handler_sup,couch_set_view,os_mon_sup, code_server,ftp_sup,cpu_sup,application_controller, ssl_connection_sup,memsup,disksup,standard_error, couch_replica_index_barrier,couch_httpd,sasl_safe_sup, inets_sup,ssl_manager,couch_file_write_guard, crypto_server,couch_main_index_barrier,crypto_sup,ale_sup, couch_replication,timer_server,mochiweb_sup,couch_drv, couch_log,'sink-disk_default',ale_dynamic_sup, couch_task_events,ns_server_cluster_sup, couch_secondary_services,couch_db_update_notifier_sup, couch_primary_services,couch_db_update,user,cb_couch_sup, ssl_broker_sup,httpd_sup,ssl_server,init,ssl_sup, couch_config]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,4}] [ns_server:info,2012-11-13T9:56:18.053,nonode@nohost:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " 
", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:info,2012-11-13T9:56:18.073,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.301.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:18.076,nonode@nohost:dist_manager<0.302.0>:dist_manager:read_address_config:55]Reading ip config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/ip" [ns_server:info,2012-11-13T9:56:18.077,nonode@nohost:dist_manager<0.302.0>:dist_manager:init:125]ip config not found. Looks like we're brand new node [error_logger:info,2012-11-13T9:56:18.081,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.304.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2012-11-13T9:56:18.083,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.303.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:18.137,nonode@nohost:dist_manager<0.302.0>:dist_manager:bringup:193]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:18.161,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.306.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.162,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.307.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:18.163,ns_1@127.0.0.1:dist_manager<0.302.0>:dist_manager:bringup:201]Attempted to save node name to disk: undefined [error_logger:info,2012-11-13T9:56:18.164,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.308.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.165,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.305.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.167,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: 
[{pid,<0.302.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.311.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.312.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.211,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.314.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.323,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.321.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.324,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.323.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.325,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.324.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.338,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.325.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.344,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.326.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.327.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] 
[error_logger:info,2012-11-13T9:56:18.386,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.328.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.398,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.329.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.404,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.330.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.416,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.331.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.417,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.322.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:18.519,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.337.0>}, {name,dets_sup}, {mfargs,{dets_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.338.0>}, {name,dets}, {mfargs,{dets_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:18.869,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:ensure_schema:432]Committed schema to disk. 
[ns_server:debug,2012-11-13T9:56:19.272,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:init:268]Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,[local_config,cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{8,0}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<0.321.0>,<0.315.0>]}, {tables,[local_config,cluster,schema]}, {transaction_commits,5}, {transaction_failures,0}, {transaction_log_writes,6}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.5"}] Peers: ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:19.273,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:189]Info from Mnesia: Create Directory "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia" [ns_server:debug,2012-11-13T9:56:19.273,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,schema, [{name,schema}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,false}, {record_name,schema}, {attributes,[table,cstruct]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829378,352918},'ns_1@127.0.0.1'}}, {version,{{3,0},{'ns_1@127.0.0.1',{1352,829378,415825}}}}]}, {tid,3,<0.334.0>}} [error_logger:info,2012-11-13T9:56:19.273,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.315.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:19.274,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:217]Peers: ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:19.275,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:217]Peers: ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:19.275,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,local_config, [{name,local_config}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,local_config}, {attributes,[key,val]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829379,170800},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,5,<0.357.0>}} [error_logger:info,2012-11-13T9:56:19.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.313.0>}, {name,mb_mnesia_sup}, 
{mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:19.276,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,local_config, [{name,local_config}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,local_config}, {attributes,[key,val]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829379,170800},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,5,<0.357.0>}} [ns_server:info,2012-11-13T9:56:19.298,ns_1@127.0.0.1:ns_config_sup<0.363.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T9:56:19.299,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.364.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.300,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.365.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:19.360,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [ns_server:info,2012-11-13T9:56:19.380,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2012-11-13T9:56:19.380,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:692]No dynamic config file found. 
Assuming we're brand new node [ns_server:debug,2012-11-13T9:56:19.380,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:695]Here's full dynamic config we loaded: [] [ns_server:info,2012-11-13T9:56:19.381,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{auto_failover_cfg,[{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}, {replication,[{enabled,true}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {buckets,[{configs,[]}]}, {memory_quota,2391}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {remote_clusters,[]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {rest,[{port,8091}]}, {{node,'ns_1@127.0.0.1',membership},active}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_num_worker_process,4}, {xdcr_connection_timeout,60}, {xdcr_worker_batch_size,100}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_checkpoint_interval,1800}] [ns_server:info,2012-11-13T9:56:19.386,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_to_1_7_1:302]Upgrading config from 1.7 to 1.7.1 [ns_server:debug,2012-11-13T9:56:19.388,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,1}}, {set,email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {set,auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}] [ns_server:info,2012-11-13T9:56:19.406,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_1_to_1_7_2:313]Upgrading config from 1.7.1 to 1.7.2 [ns_server:debug,2012-11-13T9:56:19.408,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,2}}] [ns_server:info,2012-11-13T9:56:19.409,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_2_to_1_8_0:367]Upgrading config from 1.7.2 to 1.8.0 [ns_server:debug,2012-11-13T9:56:19.411,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,0}}, {set,{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}]}] [ns_server:info,2012-11-13T9:56:19.413,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_8_0_to_1_8_1:404]Upgrading config from 1.8.0 to 1.8.1 [ns_server:debug,2012-11-13T9:56:19.416,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,1}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {set, {node,'ns_1@127.0.0.1',port_servers}, [{moxi, 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {set, {node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}] [ns_server:info,2012-11-13T9:56:19.420,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_8_1_to_2_0:433]Upgrading config from 1.8.1 to 2.0 [ns_server:debug,2012-11-13T9:56:19.422,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,0}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, 
{verbosity,[]}]}] [ns_server:debug,2012-11-13T9:56:19.423,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_init:514]Upgraded initial config: {config, {full, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", undefined,ns_config_default}, [[], [{xdcr_checkpoint_interval,1800}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_worker_batch_size,100}, {xdcr_connection_timeout,60}, {xdcr_num_worker_process,4}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {directory, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,2391}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, 
{"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,2391}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, 
{{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", 
"-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} [error_logger:info,2012-11-13T9:56:19.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.366.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.485,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.369.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:19.487,ns_1@127.0.0.1:ns_config_isasl_sync<0.370.0>:ns_config_isasl_sync:init:53]isasl_sync init: ["/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw", "_admin","_admin"] [ns_server:debug,2012-11-13T9:56:19.487,ns_1@127.0.0.1:ns_config_isasl_sync<0.370.0>:ns_config_isasl_sync:init:61]isasl_sync init buckets: [] [ns_server:debug,2012-11-13T9:56:19.491,ns_1@127.0.0.1:ns_config_isasl_sync<0.370.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [ns_server:warn,2012-11-13T9:56:19.514,ns_1@127.0.0.1:ns_config_isasl_sync<0.370.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[error_logger:info,2012-11-13T9:56:20.516,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.370.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.373.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.650,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.375.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.363.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:20.724,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.377.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:warn,2012-11-13T9:56:20.836,ns_1@127.0.0.1:ns_log<0.379.0>:ns_log:read_logs:69]Couldn't load logs from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log" (perhaps it's first startup): {error, enoent} [error_logger:info,2012-11-13T9:56:20.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.379.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.863,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.380.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.381.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} 
started: [{pid,<0.384.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:20.868,ns_1@127.0.0.1:ns_node_disco<0.385.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [ns_server:debug,2012-11-13T9:56:20.868,ns_1@127.0.0.1:ns_cookie_manager<0.311.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [user:info,2012-11-13T9:56:20.869,ns_1@127.0.0.1:ns_cookie_manager<0.311.0>:ns_cookie_manager:do_cookie_init:91]Initial otp cookie generated: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:20.869,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: otp -> [{cookie,bptrojzpwfmfrqou}] [ns_server:debug,2012-11-13T9:56:20.869,ns_1@127.0.0.1:<0.386.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:20.892,ns_1@127.0.0.1:<0.386.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [error_logger:info,2012-11-13T9:56:20.893,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.385.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.245,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.389.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.253,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.390.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.255,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:init:66]init pulling [ns_server:debug,2012-11-13T9:56:21.255,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:init:68]init pushing [error_logger:info,2012-11-13T9:56:21.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.391.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.303,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2012-11-13T9:56:21.303,ns_1@127.0.0.1:ns_config_events<0.364.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2012-11-13T9:56:21.303,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: otp -> [{cookie,bptrojzpwfmfrqou}] [ns_server:debug,2012-11-13T9:56:21.303,ns_1@127.0.0.1:ns_config_events<0.364.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on 
nodes_wanted [ns_server:debug,2012-11-13T9:56:21.304,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,0} [ns_server:debug,2012-11-13T9:56:21.304,ns_1@127.0.0.1:ns_cookie_manager<0.311.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T9:56:21.304,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}] [ns_server:debug,2012-11-13T9:56:21.304,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T9:56:21.305,ns_1@127.0.0.1:ns_cookie_manager<0.311.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T9:56:21.305,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[]}] [ns_server:debug,2012-11-13T9:56:21.305,ns_1@127.0.0.1:<0.395.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:21.305,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2012-11-13T9:56:21.306,ns_1@127.0.0.1:<0.396.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:21.306,ns_1@127.0.0.1:<0.395.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:21.306,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2012-11-13T9:56:21.306,ns_1@127.0.0.1:<0.396.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:21.307,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2012-11-13T9:56:21.307,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: max_bucket_count -> 10 [ns_server:debug,2012-11-13T9:56:21.307,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391 [ns_server:debug,2012-11-13T9:56:21.308,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:21.308,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: remote_clusters -> [] [ns_server:debug,2012-11-13T9:56:21.308,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: replication -> [{enabled,true}] 
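
The "Upgrading config by changes:" lists earlier and the stream of "config change: Key -> Value" lines here are two views of the same mechanism: the config is a flat keylist, every mutation arrives as a {set, Key, Value} tuple, and ns_config_log echoes each changed key. Folding such a delta into a keylist looks roughly like this (apply_changes/2 is illustrative, not the real ns_config code):

    apply_changes(Changes, Config) ->
        lists:foldl(
          fun({set, Key, Value}, Acc) ->
                  %% replace-or-insert {Key, Value}, keyed on tuple element 1
                  lists:keystore(Key, 1, Acc, {Key, Value})
          end, Config, Changes).

For example, the 1.7.1-to-1.7.2 step logged above reduces to a single call: apply_changes([{set, {node,'ns_1@127.0.0.1',config_version}, {1,7,2}}], Config).
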
[error_logger:info,2012-11-13T9:56:21.308,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.392.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.308,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([auto_failover_cfg,autocompaction,buckets, email_alerts,fast_warmup, index_aware_rebalance_disabled]..) [ns_server:debug,2012-11-13T9:56:21.309,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: rest -> [{port,8091}] [ns_server:info,2012-11-13T9:56:21.310,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [ns_server:debug,2012-11-13T9:56:21.310,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [error_logger:info,2012-11-13T9:56:21.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.383.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:21.311,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_capi_checkpoint_timeout -> 10 [ns_server:debug,2012-11-13T9:56:21.311,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_checkpoint_interval -> 1800 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_connection_timeout -> 60 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_doc_batch_size_kb -> 512 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_failure_restart_interval -> 30 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_num_http_connections -> 20 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_num_retries_per_request -> 2 [ns_server:debug,2012-11-13T9:56:21.312,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_num_worker_process -> 4 [ns_server:debug,2012-11-13T9:56:21.313,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: xdcr_worker_batch_size -> 100 [ns_server:debug,2012-11-13T9:56:21.313,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2012-11-13T9:56:21.313,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2012-11-13T9:56:21.313,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2012-11-13T9:56:21.313,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: 
{node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2012-11-13T9:56:21.314,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}] [ns_server:debug,2012-11-13T9:56:21.314,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2012-11-13T9:56:21.314,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}] [ns_server:debug,2012-11-13T9:56:21.315,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2012-11-13T9:56:21.315,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}] [ns_server:debug,2012-11-13T9:56:21.316,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2012-11-13T9:56:21.317,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [error_logger:info,2012-11-13T9:56:21.353,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.398.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.354,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.400.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.355,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.401.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.356,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.402.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.369,ns_1@127.0.0.1:ns_log_events<0.381.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2012-11-13T9:56:21.369,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.404.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.403.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:21.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.405.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.406.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.375,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [error_logger:info,2012-11-13T9:56:21.379,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.408.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.411.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.440,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2012-11-13T9:56:21.455,ns_1@127.0.0.1:ns_server_sup<0.378.0>:mb_master:check_master_takeover_needed:144]Sending master node question to the following nodes: [] [ns_server:debug,2012-11-13T9:56:21.456,ns_1@127.0.0.1:ns_server_sup<0.378.0>:mb_master:check_master_takeover_needed:146]Got replies: [] [ns_server:debug,2012-11-13T9:56:21.456,ns_1@127.0.0.1:ns_server_sup<0.378.0>:mb_master:check_master_takeover_needed:152]Was unable to discover master, not going to force mastership takeover [ns_server:error,2012-11-13T9:56:21.458,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [user:info,2012-11-13T9:56:21.477,ns_1@127.0.0.1:mb_master<0.414.0>:mb_master:init:89]I'm the only node, so I'm the master. 
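
The mb_master exchange above ("Sending master node question to the following nodes: [] ... Got replies: [] ... not going to force mastership takeover") followed by "I'm the only node, so I'm the master." is the entire election on a single-node cluster: with no peers to ask, the node claims mastership itself. In outline (a sketch; the master_node request and become_master/0 helper are assumed names):

    check_master_takeover(WantedNodes) ->
        Peers = WantedNodes -- [node()],
        {Replies, _BadNodes} =
            gen_server:multi_call(Peers, mb_master, master_node),
        case [M || {_Node, M} <- Replies, M =/= undefined] of
            []           -> become_master();   %% nobody else claims it
            [Master | _] -> {follow, Master}
        end.
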
[ns_server:debug,2012-11-13T9:56:21.595,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: dynamic_config_version -> undefined [ns_server:info,2012-11-13T9:56:21.595,ns_1@127.0.0.1:ns_config<0.366.0>:ns_online_config_upgrader:upgrade_config_on_join_from_pre_2_0_to_2_0:65]Adding some 2.0 specific keys to the config [ns_server:debug,2012-11-13T9:56:21.596,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2012-11-13T9:56:21.596,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,dynamic_config_version,[2,0]},{set,vbucket_map_history,[]}] [ns_server:debug,2012-11-13T9:56:21.726,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2012-11-13T9:56:21.727,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:error,2012-11-13T9:56:21.728,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2012-11-13T9:56:21.754,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: cluster_compat_version -> [2,0] [ns_server:debug,2012-11-13T9:56:21.754,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([cluster_compat_version]..) [ns_server:debug,2012-11-13T9:56:21.755,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:21.755,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:handle_call:119]Fully synchronized config in 7 us [user:warn,2012-11-13T9:56:21.755,ns_1@127.0.0.1:<0.419.0>:ns_orchestrator:consider_switching_compat_mode:665]Changed cluster compat mode from undefined to [2,0] [ns_server:info,2012-11-13T9:56:21.756,ns_1@127.0.0.1:ns_config<0.366.0>:ns_online_config_upgrader:upgrade_config_from_pre_2_0_to_2_0:69]Performing online config upgrade to 2.0 version [ns_server:debug,2012-11-13T9:56:21.756,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: dynamic_config_version -> undefined [ns_server:debug,2012-11-13T9:56:21.756,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:do_upgrade_config:469]Upgrading config by changes: [{set,dynamic_config_version,[2,0]}] [ns_server:debug,2012-11-13T9:56:21.756,ns_1@127.0.0.1:mb_master_sup<0.418.0>:misc:start_singleton:855]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.419.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:21.757,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) 
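(Editor's note: the "Upgrading config by changes" entries apply a list of {set,Key,Value} operations to the node's config. A hedged sketch of that shape -- apply_config_changes/2 is a hypothetical helper, not the ns_config implementation -- using only stdlib calls:)

    %% Upsert each {set,Key,Value} change into a {Key,Value} keylist.
    apply_config_changes(Config, Changes) ->
        lists:foldl(fun({set, Key, Value}, Acc) ->
                            lists:keystore(Key, 1, Acc, {Key, Value})
                    end,
                    Config, Changes).

(Feeding it [{set,dynamic_config_version,[2,0]},{set,vbucket_map_history,[]}] upserts exactly the two keys the log then shows being replicated.)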
[error_logger:info,2012-11-13T9:56:21.757,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.419.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:21.803,ns_1@127.0.0.1:mb_master_sup<0.418.0>:misc:start_singleton:855]start_singleton(gen_server, ns_tick, [], []): started as <0.435.0> on 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:21.804,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.435.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:22.004,ns_1@127.0.0.1:<0.436.0>:auto_failover:init:120]init auto_failover. [ns_server:debug,2012-11-13T9:56:22.004,ns_1@127.0.0.1:mb_master_sup<0.418.0>:misc:start_singleton:855]start_singleton(gen_server, auto_failover, [], []): started as <0.436.0> on 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:22.005,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.436.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.006,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.414.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:22.007,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.437.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.008,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.438.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.439.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.440.0>}, {name,master_activity_events_pids_watcher}, {mfargs, 
{master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.026,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.441.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.091,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.444.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.093,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.461.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.122,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.462.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2012-11-13T9:56:22.127,ns_1@127.0.0.1:ns_server_sup<0.378.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
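(Editor's note: once menelaus_sup reports the web port, the REST interface on 8091 is reachable. A small probe from an Erlang shell, assuming the standard /pools cluster-info endpoint and the localhost binding shown above -- illustration only, not part of the server:)

    check_rest() ->
        inets:start(),                         % ok | {error,{already_started,_}}
        {ok, {{_Version, Code, _Reason}, _Headers, _Body}} =
            httpc:request(get,
                          {"http://127.0.0.1:8091/pools", []}, [], []),
        Code.                                  % expect 200 once menelaus serves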
[error_logger:info,2012-11-13T9:56:22.127,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.463.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.129,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.443.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:22.144,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.465.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.152,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.466.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [ns_server:info,2012-11-13T9:56:22.166,ns_1@127.0.0.1:<0.467.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2012-11-13T9:56:22.166,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.467.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.167,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.464.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:22.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.469.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:22.195,ns_1@127.0.0.1:<0.470.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [ns_server:debug,2012-11-13T9:56:22.237,ns_1@127.0.0.1:<0.472.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [error_logger:info,2012-11-13T9:56:22.241,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.470.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", 
"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:22.255,ns_1@127.0.0.1:<0.475.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2012-11-13T9:56:22.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.472.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.468.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:22.264,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.474.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.265,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.475.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.476.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.267,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.477.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:23.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.479.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.629,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.492.0>}, 
{name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.630,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.491.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:23.708,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.494.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:23.798,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-minute' [error_logger:info,2012-11-13T9:56:23.799,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.496.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:23.799,ns_1@127.0.0.1:<0.506.0>:supervisor_cushion:init:43]starting compaction_daemon with delay of 3000 [error_logger:info,2012-11-13T9:56:23.800,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.498.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.801,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.499.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.802,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.500.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:24.024,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-minute', [{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829383,799286},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,6,<0.503.0>}} [ns_server:debug,2012-11-13T9:56:24.024,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-minute', 
[{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829383,799286},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,6,<0.503.0>}} [ns_server:debug,2012-11-13T9:56:24.025,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-hour' [ns_server:debug,2012-11-13T9:56:24.054,ns_1@127.0.0.1:compaction_daemon<0.516.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction. [ns_server:debug,2012-11-13T9:56:24.054,ns_1@127.0.0.1:compaction_daemon<0.516.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [error_logger:info,2012-11-13T9:56:24.054,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.506.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:24.293,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,26025},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,7,<0.511.0>}} [ns_server:debug,2012-11-13T9:56:24.294,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,26025},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,7,<0.511.0>}} [ns_server:debug,2012-11-13T9:56:24.294,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-day' [ns_server:debug,2012-11-13T9:56:24.330,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.525.0>:xdc_rdoc_replication_srv:handle_info:132]doing replicate_newnodes_docs [error_logger:info,2012-11-13T9:56:24.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.525.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:24.494,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-day', [{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, 
{disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,295004},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,8,<0.520.0>}} [ns_server:debug,2012-11-13T9:56:24.495,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-day', [{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,295004},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,8,<0.520.0>}} [ns_server:info,2012-11-13T9:56:24.496,ns_1@127.0.0.1:set_view_update_daemon<0.528.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [ns_server:debug,2012-11-13T9:56:24.496,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-week' [error_logger:info,2012-11-13T9:56:24.497,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.528.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:24.499,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.530.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:24.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.378.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:24.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: ns_server started_at: 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:24.628,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,497479},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,9,<0.532.0>}} [ns_server:debug,2012-11-13T9:56:24.629,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, 
{disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,497479},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,9,<0.532.0>}} [ns_server:debug,2012-11-13T9:56:24.631,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-month' [ns_server:debug,2012-11-13T9:56:25.023,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,631391},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,10,<0.539.0>}} [ns_server:debug,2012-11-13T9:56:25.024,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829384,631391},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,10,<0.539.0>}} [ns_server:debug,2012-11-13T9:56:25.059,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-@system-year' [ns_server:debug,2012-11-13T9:56:25.250,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829385,59785},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,11,<0.546.0>}} [ns_server:debug,2012-11-13T9:56:25.250,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829385,59785},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,11,<0.546.0>}} [ns_server:debug,2012-11-13T9:56:29.178,ns_1@127.0.0.1:ns_config_rep<0.392.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([uuid]..) 
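(Editor's note: the "Creating table 'stats_archiver-@system-*'" entries and the schema write events that follow pin down the table options: ordered_set, disc copies on this node only, local content, stat_entry records keyed by timestamp. A sketch of a create call producing that schema -- create_archiver_table/1 is hypothetical and assumes mnesia is already started with a disc schema on this node:)

    create_archiver_table(Tab) ->
        mnesia:create_table(Tab,
            [{type, ordered_set},
             {disc_copies, [node()]},          % this node only
             {local_content, true},
             {record_name, stat_entry},
             {attributes, [timestamp, values]}]).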
[ns_server:debug,2012-11-13T9:56:29.180,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:log_common:111]config change: uuid -> <<"b34a9c2e03786d913446a4e84919e1d5">> [ns_server:debug,2012-11-13T9:56:32.503,ns_1@127.0.0.1:<0.529.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.528.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.503,ns_1@127.0.0.1:<0.526.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.525.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.504,ns_1@127.0.0.1:<0.517.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.516.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.504,ns_1@127.0.0.1:<0.501.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.500.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.504,ns_1@127.0.0.1:<0.497.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.496.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.504,ns_1@127.0.0.1:<0.495.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.494.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.504,ns_1@127.0.0.1:<0.492.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.491.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:32.505,ns_1@127.0.0.1:<0.478.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.474.0>} exited with reason killed [ns_server:debug,2012-11-13T9:56:32.505,ns_1@127.0.0.1:ns_port_memcached<0.473.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [error_logger:error,2012-11-13T9:56:32.506,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.492.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:32.707,ns_1@127.0.0.1:ns_port_memcached<0.473.0>:ns_port_server:log:171]memcached<0.473.0>: EOL on stdin. Initiating shutdown [ns_server:info,2012-11-13T9:56:33.136,ns_1@127.0.0.1:ns_port_memcached<0.473.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0 [ns_server:debug,2012-11-13T9:56:33.137,ns_1@127.0.0.1:<0.471.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [ns_server:info,2012-11-13T9:56:33.137,ns_1@127.0.0.1:<0.471.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T9:56:33.138,ns_1@127.0.0.1:<0.471.0>:ns_port_server:log:171]moxi<0.471.0>: EOL on stdin. 
Exiting [ns_server:debug,2012-11-13T9:56:33.138,ns_1@127.0.0.1:<0.442.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {master_activity_events,<0.441.0>} exited with reason killed [ns_server:info,2012-11-13T9:56:33.138,ns_1@127.0.0.1:mb_master<0.414.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.415.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.414.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.409.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.408.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.407.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {buckets_events,<0.406.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.399.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.398.0>} exited with reason killed [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.393.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events_local,<0.392.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.139,ns_1@127.0.0.1:<0.382.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.380.0>} exited with reason killed [ns_server:debug,2012-11-13T9:56:33.241,ns_1@127.0.0.1:<0.376.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.375.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.242,ns_1@127.0.0.1:<0.374.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.373.0>} exited with reason shutdown [ns_server:debug,2012-11-13T9:56:33.242,ns_1@127.0.0.1:<0.371.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.370.0>} exited with reason shutdown [error_logger:error,2012-11-13T9:56:33.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.377.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:33.242,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:wait_saver:539]Waited for saver done. 
State= {config, {full, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", undefined,ns_config_default}, [[], [{xdcr_checkpoint_interval,1800}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_worker_batch_size,100}, {xdcr_connection_timeout,60}, {xdcr_num_worker_process,4}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {directory, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,2391}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, 
use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,2391}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, 
{xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} [ns_server:info,2012-11-13T9:56:33.386,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting. [error_logger:info,2012-11-13T9:56:33.386,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: mnesia exited: stopped type: temporary [error_logger:error,2012-11-13T9:56:33.491,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.483.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.483.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.480.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 2016 neighbours: [error_logger:error,2012-11-13T9:56:33.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.260.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.260.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.257.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 2583 neighbours: [error_logger:error,2012-11-13T9:56:33.494,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.484.0> terminating ** Last message in was {'EXIT',<0.245.0>,killed} ** When Server state == {db,<0.484.0>,<0.485.0>,nil,<<"1352829382613535">>, <0.480.0>,<0.486.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.480.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.480.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.480.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create,sys_db, {user_ctx, 
{user_ctx,null, [<<"_admin">>,<<"_replicator">>], undefined}}]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T9:56:33.498,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.484.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 313 neighbours: [error_logger:error,2012-11-13T9:56:33.499,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.261.0> terminating ** Last message in was {'EXIT',<0.245.0>,killed} ** When Server state == {db,<0.261.0>,<0.262.0>,nil,<<"1352829377441480">>, <0.257.0>,<0.263.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.257.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.257.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.257.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create, {user_ctx, {user_ctx,null,[<<"_admin">>],undefined}}, sys_db]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T9:56:33.503,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.261.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 395 neighbours: [error_logger:info,2012-11-13T9:56:33.504,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: mapreduce exited: stopped type: temporary [error_logger:info,2012-11-13T9:56:33.504,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: couch_view_parser exited: stopped type: temporary [error_logger:info,2012-11-13T9:56:33.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: couch_index_merger exited: stopped type: temporary [error_logger:info,2012-11-13T9:56:33.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: couch_set_view exited: stopped type: temporary [error_logger:info,2012-11-13T9:56:33.506,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:33.507,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= application: couch_set_view started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:33.507,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:33.508,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mapreduce started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:33.509,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.597.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.597.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.600.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:33.565,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.601.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.566,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.602.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.567,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.603.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] 
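(Editor's note: the two couch_file CRASH REPORTs earlier in this shutdown/restart sequence are writer processes calling couch_file_write_guard -- restarted above as child <0.603.0> -- after the old guard had already stopped; a gen_server:call to a dead registered name exits the caller with {noproc,...}. A sketch of tolerating that during teardown; the {remove,Pid} message is copied from the crash reports themselves, while the wrapper function is hypothetical:)

    remove_from_guard(Pid) ->
        try
            gen_server:call(couch_file_write_guard, {remove, Pid}, infinity)
        catch
            exit:{noproc, _} ->
                %% guard already gone; expected noise while couch restarts
                ok
        end.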
[error_logger:info,2012-11-13T9:56:33.850,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.604.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.874,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.617.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.618.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.877,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.619.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:33.878,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.620.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.623.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.881,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.624.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.882,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.599.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:33.883,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: 
[{pid,<0.626.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:33.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.627.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.629.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.631.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.633.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.634.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.889,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.636.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.890,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.638.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.892,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.640.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.893,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.657.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.894,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.625.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:33.895,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.598.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:error,2012-11-13T9:56:33.897,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.620.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:33.896,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{9,56,33}}}, {memory, [{total,25813328}, {processes,7689912}, {processes_used,7559896}, {system,18123416}, {atom,1299409}, {atom_used,1270552}, {binary,228312}, {code,13004872}, {ets,1654792}]}, {loaded, [lib,mochinum,capi_utils,mochiweb_mime,mochiweb_io,mb_grid, mochijson2,set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, ns_config_isasl_sync,ns_config_replica,vclock, 
ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, 
otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,170}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']}, {cookie,bptrojzpwfmfrqou}, {wordsize,8}, {wall_clock,20}] [error_logger:info,2012-11-13T9:56:33.899,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.659.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:33.931,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:info,2012-11-13T9:56:33.935,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.662.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, 
{restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.937,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.663.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.664.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.939,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.666.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.676.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.960,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.678.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.962,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.679.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.680.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:33.965,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.681.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:34.597,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.682.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:34.598,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:440]Using existing disk schema on 
['ns_1@127.0.0.1']. [ns_server:debug,2012-11-13T9:56:34.598,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-year' [ns_server:debug,2012-11-13T9:56:34.599,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-month' [ns_server:debug,2012-11-13T9:56:34.599,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-week' [ns_server:debug,2012-11-13T9:56:34.599,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-day' [error_logger:info,2012-11-13T9:56:34.599,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.704.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:34.600,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-hour' [ns_server:debug,2012-11-13T9:56:34.600,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-minute' [ns_server:debug,2012-11-13T9:56:34.601,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of local_config [error_logger:info,2012-11-13T9:56:34.601,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.705.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:34.601,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:ensure_schema:450]Have local copy of cluster [error_logger:info,2012-11-13T9:56:34.602,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.706.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:34.603,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.707.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:34.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.677.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:34.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:34.938,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:init:268]Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, 
{db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,['stats_archiver-@system-minute','stats_archiver-@system-year', 'stats_archiver-@system-week','stats_archiver-@system-month', 'stats_archiver-@system-hour','stats_archiver-@system-day', local_config,cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{8,0}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<0.676.0>,<0.667.0>]}, {tables,['stats_archiver-@system-year','stats_archiver-@system-month', 'stats_archiver-@system-week','stats_archiver-@system-day', 'stats_archiver-@system-hour','stats_archiver-@system-minute', local_config,cluster,schema]}, {transaction_commits,2}, {transaction_failures,8}, {transaction_log_writes,0}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.5"}] Peers: ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T9:56:34.940,ns_1@127.0.0.1:ns_config_sup<0.751.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T9:56:34.940,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.667.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:34.941,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.665.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T9:56:34.942,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T9:56:34.942,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.752.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:34.943,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [error_logger:info,2012-11-13T9:56:34.943,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.753.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:34.944,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:695]Here's full dynamic config we loaded: [[{uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,2391}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, 
{log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}]] [ns_server:info,2012-11-13T9:56:34.951,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", 
{"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, 
{xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,2391}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}] [ns_server:debug,2012-11-13T9:56:34.958,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_config_isasl_sync:init:53]isasl_sync init: ["/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw", "_admin","_admin"] [ns_server:debug,2012-11-13T9:56:34.959,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_config_isasl_sync:init:61]isasl_sync init buckets: [] [error_logger:info,2012-11-13T9:56:34.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.754.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:34.959,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [error_logger:info,2012-11-13T9:56:34.960,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.756.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T9:56:34.961,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
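The "Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying." warning just above is a startup-ordering race: ns_config_isasl_sync calls into ns_memcached:connect to push the freshly written isasl.pw file before the memcached port server (started further down) is listening, so the TCP connect is refused and retried. A hedged sketch of that retry pattern, assuming an illustrative host/port/interval rather than the actual values ns_memcached uses:

    %% Sketch of retry-until-listening; not the ns_memcached implementation.
    -module(example_connect).
    -export([connect_with_retry/3]).

    connect_with_retry(Host, Port, RetryMs) ->
        case gen_tcp:connect(Host, Port, [binary, {active, false}]) of
            {ok, Sock} ->
                {ok, Sock};
            {error, econnrefused} ->
                %% service not accepting yet; wait and try again
                timer:sleep(RetryMs),
                connect_with_retry(Host, Port, RetryMs)
        end.

Usage would be along the lines of example_connect:connect_with_retry("127.0.0.1", 11210, 1000). The warning is therefore benign as long as a later entry shows the connection succeeding once memcached is up.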
[error_logger:info,2012-11-13T9:56:35.962,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.757.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.760.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.964,ns_1@127.0.0.1:ns_node_disco<0.772.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [error_logger:info,2012-11-13T9:56:35.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.762.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.965,ns_1@127.0.0.1:ns_cookie_manager<0.663.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T9:56:35.965,ns_1@127.0.0.1:<0.773.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:35.966,ns_1@127.0.0.1:<0.773.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [error_logger:info,2012-11-13T9:56:35.966,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.751.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T9:56:35.966,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:init:66]init pulling [ns_server:info,2012-11-13T9:56:35.966,ns_1@127.0.0.1:ns_node_disco_events<0.771.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:35.967,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:init:68]init pushing [error_logger:info,2012-11-13T9:56:35.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.764.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.968,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2012-11-13T9:56:35.968,ns_1@127.0.0.1:ns_config_events<0.752.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [error_logger:info,2012-11-13T9:56:35.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: 
[{pid,<0.766.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.969,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}] [ns_server:debug,2012-11-13T9:56:35.969,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([auto_failover_cfg,autocompaction,buckets, cluster_compat_version,dynamic_config_version, email_alerts]..) [ns_server:debug,2012-11-13T9:56:35.969,ns_1@127.0.0.1:ns_config_events<0.752.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2012-11-13T9:56:35.970,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T9:56:35.970,ns_1@127.0.0.1:ns_cookie_manager<0.663.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [error_logger:info,2012-11-13T9:56:35.970,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.767.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.971,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[]}] [ns_server:debug,2012-11-13T9:56:35.971,ns_1@127.0.0.1:ns_log_events<0.768.0>:ns_mail_log:init:44]ns_mail_log started up [ns_server:debug,2012-11-13T9:56:35.971,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:ns_cookie_manager<0.663.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:<0.783.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: cluster_compat_version -> [2,0] [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:ns_server_sup<0.765.0>:mb_master:check_master_takeover_needed:144]Sending master node question to the following nodes: [] [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:<0.785.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [error_logger:info,2012-11-13T9:56:35.972,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.768.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.972,ns_1@127.0.0.1:<0.783.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou 
[ns_server:debug,2012-11-13T9:56:35.973,ns_1@127.0.0.1:ns_server_sup<0.765.0>:mb_master:check_master_takeover_needed:146]Got replies: [] [ns_server:debug,2012-11-13T9:56:35.973,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2012-11-13T9:56:35.973,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: dynamic_config_version -> [2,0] [ns_server:debug,2012-11-13T9:56:35.973,ns_1@127.0.0.1:<0.785.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T9:56:35.973,ns_1@127.0.0.1:ns_server_sup<0.765.0>:mb_master:check_master_takeover_needed:152]Was unable to discover master, not going to force mastership takeover [ns_server:error,2012-11-13T9:56:35.974,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T9:56:35.974,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.771.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.974,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [user:info,2012-11-13T9:56:35.974,ns_1@127.0.0.1:mb_master<0.799.0>:mb_master:init:89]I'm the only node, so I'm the master. 
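The {noproc,{gen_server,call,...}} exits that ns_heart logs above ("Ignoring failure to grab system stats", "Ignoring exception getting xdcr replication infos", "Failed to grab samples loader tasks") are benign startup races: the heartbeat fires before stats_reader, xdc_replication_sup, and samples_loader_tasks have registered, and a gen_server:call to an unregistered name exits with noproc, which ns_heart catches and carries on. A sketch of that defensive-call pattern (the function and server names here are placeholders, not the ns_heart internals):

    %% Tolerate calling a server that may not have registered yet.
    safe_call(Name, Req) ->
        try
            {ok, gen_server:call(Name, Req, 2000)}
        catch
            exit:{noproc, _} ->
                %% target not started/registered yet: report and move on
                {error, noproc};
            exit:{timeout, _} ->
                {error, timeout}
        end.

Once the missing processes finish starting, these heartbeat warnings stop on their own.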
[ns_server:debug,2012-11-13T9:56:35.975,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [error_logger:info,2012-11-13T9:56:35.975,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.772.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.976,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2012-11-13T9:56:35.976,ns_1@127.0.0.1:mb_master_sup<0.801.0>:misc:start_singleton:855]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.802.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:35.976,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: max_bucket_count -> 10 [ns_server:debug,2012-11-13T9:56:35.977,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391 [error_logger:info,2012-11-13T9:56:35.977,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.775.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.977,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T9:56:35.977,ns_1@127.0.0.1:mb_master_sup<0.801.0>:misc:start_singleton:855]start_singleton(gen_server, ns_tick, [], []): started as <0.806.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:35.978,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: otp -> [{cookie,bptrojzpwfmfrqou}] [error_logger:info,2012-11-13T9:56:35.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.776.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.978,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: remote_clusters -> [] [ns_server:debug,2012-11-13T9:56:35.978,ns_1@127.0.0.1:<0.807.0>:auto_failover:init:120]init auto_failover. 
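The "start_singleton(gen_fsm, ns_orchestrator, ...)" and "start_singleton(gen_server, ns_tick, ...)" lines above record mb_master starting cluster-wide singleton processes on the node that just declared itself master. Couchbase's misc:start_singleton has its own implementation, but the general idea can be sketched with OTP's global registration, which guarantees at most one instance of a name across connected nodes (example_singleton and example_tick are hypothetical modules):

    %% Sketch only: global registration gives one instance cluster-wide.
    -module(example_singleton).
    -export([start/1]).

    start(Name) ->
        case gen_server:start_link({global, Name}, example_tick, [], []) of
            {ok, Pid} ->
                {ok, Pid};
            {error, {already_started, Pid}} ->
                %% another node already runs the singleton; reuse it
                {ok, Pid}
        end.

On this single-node cluster the master election is trivial, hence the "I'm the only node, so I'm the master." entry above.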
[ns_server:debug,2012-11-13T9:56:35.979,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: replication -> [{enabled,true}] [ns_server:debug,2012-11-13T9:56:35.979,ns_1@127.0.0.1:mb_master_sup<0.801.0>:misc:start_singleton:855]start_singleton(gen_server, auto_failover, [], []): started as <0.807.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T9:56:35.979,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: rest -> [{port,8091}] [ns_server:info,2012-11-13T9:56:35.979,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [ns_server:debug,2012-11-13T9:56:35.980,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [error_logger:info,2012-11-13T9:56:35.980,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.777.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T9:56:35.980,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: uuid -> <<"b34a9c2e03786d913446a4e84919e1d5">> [ns_server:debug,2012-11-13T9:56:35.981,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: vbucket_map_history -> [] [ns_server:debug,2012-11-13T9:56:35.981,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_capi_checkpoint_timeout -> 10 [ns_server:debug,2012-11-13T9:56:35.982,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_checkpoint_interval -> 1800 [ns_server:debug,2012-11-13T9:56:35.982,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_connection_timeout -> 60 [ns_server:debug,2012-11-13T9:56:35.982,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_doc_batch_size_kb -> 512 [ns_server:debug,2012-11-13T9:56:35.982,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_failure_restart_interval -> 30 [ns_server:debug,2012-11-13T9:56:35.983,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_num_http_connections -> 20 [error_logger:info,2012-11-13T9:56:35.983,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.778.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T9:56:35.984,ns_1@127.0.0.1:ns_server_sup<0.765.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
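The stream of "config change: Key -> Value" lines comes from ns_config_log, a handler attached to the ns_config_events gen_event manager started earlier; note that sensitive keys are masked, as in "config change: rest_creds -> ********" above. A hedged sketch of that publish/subscribe shape (not the ns_config_log source; the masking set here is illustrative):

    %% Hypothetical gen_event handler logging config changes with masking.
    -module(example_config_log).
    -behaviour(gen_event).
    -export([init/1, handle_event/2, handle_call/2, handle_info/2,
             terminate/2, code_change/3]).

    init([]) -> {ok, []}.

    handle_event({Key, Value}, State) ->
        Shown = case Key of
                    rest_creds -> "********";   % never log credentials
                    _ -> io_lib:format("~p", [Value])
                end,
        io:format("config change: ~p -> ~s~n", [Key, Shown]),
        {ok, State}.

    handle_call(_Req, State) -> {ok, ok, State}.
    handle_info(_Info, State) -> {ok, State}.
    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

This is why every key loaded from config.dat earlier is echoed once here: the initial load is announced through the same event manager as later changes.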
[ns_server:debug,2012-11-13T9:56:35.984,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_num_retries_per_request -> 2
[ns_server:debug,2012-11-13T9:56:35.985,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_num_worker_process -> 4
[ns_server:debug,2012-11-13T9:56:35.985,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: xdcr_worker_batch_size -> 100
[error_logger:info,2012-11-13T9:56:35.985,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.770.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:info,2012-11-13T9:56:35.986,ns_1@127.0.0.1:<0.838.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213
[ns_server:debug,2012-11-13T9:56:35.986,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_indexers} -> 4
[ns_server:debug,2012-11-13T9:56:35.986,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_replica_indexers} -> 2
[ns_server:debug,2012-11-13T9:56:35.986,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092
[error_logger:info,2012-11-13T9:56:35.987,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.781.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:35.987,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}]
[ns_server:debug,2012-11-13T9:56:35.988,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,0}
[ns_server:debug,2012-11-13T9:56:35.988,ns_1@127.0.0.1:<0.841.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000
[ns_server:debug,2012-11-13T9:56:35.988,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]
[error_logger:info,2012-11-13T9:56:35.988,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.784.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:35.989,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',membership} -> active
[ns_server:debug,2012-11-13T9:56:35.990,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]
[error_logger:info,2012-11-13T9:56:35.990,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.786.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:35.990,ns_1@127.0.0.1:<0.843.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000
[ns_server:debug,2012-11-13T9:56:35.991,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}]
[ns_server:debug,2012-11-13T9:56:35.992,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]
[error_logger:info,2012-11-13T9:56:35.992,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.787.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:35.993,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]
[error_logger:info,2012-11-13T9:56:35.994,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_mail_sup}
    started: [{pid,<0.789.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:35.995,ns_1@127.0.0.1:<0.846.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms
[ns_server:debug,2012-11-13T9:56:35.995,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}]
[error_logger:info,2012-11-13T9:56:35.996,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.788.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:35.998,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.790.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.000,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.791.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.001,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.793.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.002,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.797.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.004,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mb_master_sup}
    started: [{pid,<0.802.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.005,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mb_master_sup}
    started: [{pid,<0.806.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.006,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mb_master_sup}
    started: [{pid,<0.807.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.007,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.799.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.008,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.808.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.809.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.011,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.810.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.012,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.811.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:36.013,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}}
[ns_server:error,2012-11-13T9:56:36.014,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[error_logger:info,2012-11-13T9:56:36.014,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.812.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.016,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,menelaus_sup}
    started: [{pid,<0.815.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.017,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,menelaus_sup}
    started: [{pid,<0.832.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.018,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,menelaus_sup}
    started: [{pid,<0.833.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,menelaus_sup}
    started: [{pid,<0.834.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.814.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mc_sup}
    started: [{pid,<0.836.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.022,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mc_sup}
    started: [{pid,<0.837.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.023,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,mc_sup}
    started: [{pid,<0.838.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.024,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.835.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.025,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_port_sup}
    started: [{pid,<0.840.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.029,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_port_sup}
    started: [{pid,<0.841.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.036,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_port_sup}
    started: [{pid,<0.843.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.041,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.839.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.042,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.845.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.043,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.846.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.044,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.848.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.045,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.849.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.064,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.850.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.065,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_bucket_sup}
    started: [{pid,<0.861.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.067,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.859.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T9:56:36.066,ns_1@127.0.0.1:<0.870.0>:supervisor_cushion:init:43]starting compaction_daemon with delay of 3000
[ns_server:debug,2012-11-13T9:56:36.068,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[error_logger:info,2012-11-13T9:56:36.068,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.862.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:36.069,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:56:36.069,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.873.0>:xdc_rdoc_replication_srv:handle_info:132]doing replicate_newnodes_docs
[error_logger:info,2012-11-13T9:56:36.070,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.864.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:36.070,ns_1@127.0.0.1:set_view_update_daemon<0.875.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings:
    update interval: 5000ms
    minimum number of changes: 5000
[error_logger:info,2012-11-13T9:56:36.071,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.866.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.072,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.867.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.073,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.868.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:36.074,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.870.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.075,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.873.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:36.076,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([memory_quota]..)
[error_logger:info,2012-11-13T9:56:36.077,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.875.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T9:56:36.078,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391
[error_logger:info,2012-11-13T9:56:36.079,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_sup}
    started: [{pid,<0.877.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:36.080,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_server_cluster_sup}
    started: [{pid,<0.765.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T9:57:06.069,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:57:06.070,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:57:36.070,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:57:36.070,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:58:06.071,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:58:06.071,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:58:36.072,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:58:36.072,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:59:06.073,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:59:06.073,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T9:59:36.074,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T9:59:36.074,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:00:06.075,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T10:00:06.075,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:error,2012-11-13T10:00:17.454,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:handle_call:623]Failed to update config: {exit,{not_found,"default"}}
Stacktrace: [{ns_bucket,'-delete_bucket_returning_config/1-fun-0-',4}, {misc,key_update_rec,4}, {ns_config,'-update_sub_key/3-fun-0-',3}, {ns_config,'-update_key/2-fun-0-',3}, {ns_config,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]
[ns_server:debug,2012-11-13T10:00:17.457,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:00:17.457,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[[{map,[]}, {fastForwardMap,[]}, {uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,[]}]]}]
[ns_server:debug,2012-11-13T10:00:17.457,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[menelaus:info,2012-11-13T10:00:17.457,ns_1@127.0.0.1:<0.816.0>:menelaus_web_buckets:do_bucket_create:415]Created bucket "default" of type: membase [{num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}]
[ns_server:debug,2012-11-13T10:00:17.458,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"
[ns_server:debug,2012-11-13T10:00:17.502,ns_1@127.0.0.1:<0.1677.0>:ns_janitor:cleanup_with_membase_bucket_check_servers:46]janitor decided to update servers list
[ns_server:debug,2012-11-13T10:00:17.502,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:00:17.503,ns_1@127.0.0.1:ns_bucket_worker<0.848.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent,infinity,supervisor, [single_bucket_sup]}
[ns_server:debug,2012-11-13T10:00:17.503,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}]}]}]
[ns_server:debug,2012-11-13T10:00:17.503,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2012-11-13T10:00:17.604,ns_1@127.0.0.1:<0.1684.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}}
[ns_server:debug,2012-11-13T10:00:17.605,ns_1@127.0.0.1:<0.1684.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "default" on 'ns_1@127.0.0.1'
[ns_server:info,2012-11-13T10:00:17.719,ns_1@127.0.0.1:<0.842.0>:ns_port_server:log:171]moxi<0.842.0>: 2012-11-13 10:04:00: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of vBuckets must be a power of two > 0 and <= 65536 ({
moxi<0.842.0>: "name": "default",
moxi<0.842.0>: "nodeLocator": "vbucket",
moxi<0.842.0>: "saslPassword": "",
moxi<0.842.0>: "nodes": [{
moxi<0.842.0>: "hostname": "127.0.0.1:8091",
moxi<0.842.0>: "ports": {
moxi<0.842.0>: "direct": 11210,
moxi<0.842.0>: "proxy": 11211
moxi<0.842.0>: }
moxi<0.842.0>: }],
moxi<0.842.0>: "vBucketServerMap": {
moxi<0.842.0>: "hashAlgorithm": "CRC",
moxi<0.842.0>: "numReplicas": 1,
moxi<0.842.0>: "serverList": ["127.0.0.1:11210"],
moxi<0.842.0>: "vBucketMap": []
moxi<0.842.0>: }
moxi<0.842.0>: })
[error_logger:info,2012-11-13T10:00:17.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_bucket_sup}
    started: [{pid,<0.1685.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:00:18.012,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:init:218]Usable vbuckets: []
[ns_server:debug,2012-11-13T10:00:18.012,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[error_logger:info,2012-11-13T10:00:18.012,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1687.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:00:18.226,ns_1@127.0.0.1:<0.838.0>:mc_tcp_listener:accept_loop:31]Got new connection
[ns_server:debug,2012-11-13T10:00:18.248,ns_1@127.0.0.1:<0.838.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.1704.0>
[ns_server:info,2012-11-13T10:00:18.274,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;"
[error_logger:info,2012-11-13T10:00:18.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1703.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1709.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.329,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1710.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:info,2012-11-13T10:00:18.337,ns_1@127.0.0.1:janitor_agent-default<0.1712.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config.
[error_logger:info,2012-11-13T10:00:18.337,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1711.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]
[ns_server:info,2012-11-13T10:00:18.338,ns_1@127.0.0.1:janitor_agent-default<0.1712.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config
[error_logger:info,2012-11-13T10:00:18.339,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1712.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.356,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1713.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:00:18.373,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-minute'
[error_logger:info,2012-11-13T10:00:18.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1714.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.374,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1716.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.375,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1718.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:00:18.376,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-default'}
    started: [{pid,<0.1719.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T10:00:18.425,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:log:171]memcached<0.844.0>: Tue Nov 13 10:00:18.225294 PST 3: Trying to connect to mccouch: "localhost:11213"
memcached<0.844.0>: Tue Nov 13 10:00:18.226492 PST 3: Connected to mccouch: "localhost:11213"
memcached<0.844.0>: Tue Nov 13 10:00:18.273915 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear"
memcached<0.844.0>: Tue Nov 13 10:00:18.274020 PST 3: Failed to load mutation log, falling back to key dump
memcached<0.844.0>: Tue Nov 13 10:00:18.274063 PST 3: Extension support isn't implemented in this version of bucket_engine
memcached<0.844.0>: Tue Nov 13 10:00:18.274235 PST 3: metadata loaded in 24 ms
memcached<0.844.0>: Tue Nov 13 10:00:18.299836 PST 3: warmup completed in 24 ms
[ns_server:debug,2012-11-13T10:00:18.476,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {error,{exit,{aborted,{no_exists,['stats_archiver-default-minute']}}}}
[ns_server:debug,2012-11-13T10:00:18.532,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:00:18.533,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-minute', [{name,'stats_archiver-default-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,373432},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,577,<0.1721.0>}}
[user:info,2012-11-13T10:00:18.533,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
[ns_server:debug,2012-11-13T10:00:18.533,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-minute', [{name,'stats_archiver-default-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,373432},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,577,<0.1721.0>}}
[ns_server:debug,2012-11-13T10:00:18.534,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-hour'
[ns_server:debug,2012-11-13T10:00:18.704,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-hour', [{name,'stats_archiver-default-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,534985},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,579,<0.1737.0>}}
[ns_server:debug,2012-11-13T10:00:18.705,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-hour', [{name,'stats_archiver-default-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,534985},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,579,<0.1737.0>}}
[ns_server:debug,2012-11-13T10:00:18.706,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-day'
[ns_server:debug,2012-11-13T10:00:18.734,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {error,{exit,{badarg,[{erlang,hd,[[]]}, {stats_reader,'-do_handle_call/3-fun-0-',2}, {mnesia_tm,non_transaction,5}, {stats_reader,do_handle_call,3}, {stats_reader,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}}}
[ns_server:info,2012-11-13T10:00:18.736,ns_1@127.0.0.1:<0.1677.0>:ns_janitor:cleanup_with_membase_bucket_check_map:67]janitor decided to generate initial vbucket map
[ns_server:debug,2012-11-13T10:00:18.849,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-day', [{name,'stats_archiver-default-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,706325},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,580,<0.1744.0>}}
[ns_server:debug,2012-11-13T10:00:18.850,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-day', [{name,'stats_archiver-default-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,706325},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,580,<0.1744.0>}}
[ns_server:debug,2012-11-13T10:00:18.850,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-week'
[ns_server:debug,2012-11-13T10:00:18.972,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-week', [{name,'stats_archiver-default-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,851175},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,581,<0.1757.0>}}
[ns_server:debug,2012-11-13T10:00:18.973,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-week', [{name,'stats_archiver-default-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,851175},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,581,<0.1757.0>}}
[ns_server:debug,2012-11-13T10:00:18.973,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-month'
[ns_server:debug,2012-11-13T10:00:19.004,ns_1@127.0.0.1:<0.1677.0>:mb_map:generate_map:186]Natural map score: {64,0,0}
[ns_server:debug,2012-11-13T10:00:19.005,ns_1@127.0.0.1:<0.1677.0>:mb_map:generate_map:193]Rnd maps scores: {64,0,0}, {64,0,0}
[ns_server:debug,2012-11-13T10:00:19.005,ns_1@127.0.0.1:<0.1677.0>:mb_map:generate_map:207]Considering 1 maps: [{64,0,0}]
[ns_server:debug,2012-11-13T10:00:19.005,ns_1@127.0.0.1:<0.1677.0>:mb_map:generate_map:219]Best map score: {64,0,0} (true,true,true)
[ns_server:debug,2012-11-13T10:00:19.006,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:00:19.007,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets,vbucket_map_history]..)
[ns_server:debug,2012-11-13T10:00:19.008,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:00:19.009,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:00:19.009,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:00:19.009,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: vbucket_map_history -> [{[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]
[ns_server:debug,2012-11-13T10:00:19.009,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:00:19.011,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[{"default", [{map,[{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]},
{4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]},
{8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]},
{12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]},
{16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]},
{20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]},
{24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]},
{28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]},
{32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]},
{36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]},
{40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]},
{44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]},
{48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]},
{52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]},
{56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]},
{60,[],['ns_1@127.0.0.1',undefined]}, {61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}]}, {fastForwardMap,[]}, {uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}]}]}]
[ns_server:info,2012-11-13T10:00:19.077,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active
[ns_server:info,2012-11-13T10:00:19.078,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active
[ns_server:info,2012-11-13T10:00:19.078,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active
[ns_server:info,2012-11-13T10:00:19.079,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active
[ns_server:info,2012-11-13T10:00:19.080,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active
[ns_server:info,2012-11-13T10:00:19.080,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active
[ns_server:info,2012-11-13T10:00:19.081,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active
[ns_server:info,2012-11-13T10:00:19.082,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active
[ns_server:info,2012-11-13T10:00:19.082,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active
[ns_server:info,2012-11-13T10:00:19.083,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active
[ns_server:info,2012-11-13T10:00:19.083,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active
[ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active
[ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active
[ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active [ns_server:info,2012-11-13T10:00:19.085,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active [ns_server:info,2012-11-13T10:00:19.085,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active [ns_server:info,2012-11-13T10:00:19.086,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active [ns_server:info,2012-11-13T10:00:19.086,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active [ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active [ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active [ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active [ns_server:info,2012-11-13T10:00:19.088,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active [ns_server:info,2012-11-13T10:00:19.088,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active [ns_server:info,2012-11-13T10:00:19.089,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active [ns_server:info,2012-11-13T10:00:19.089,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active [ns_server:info,2012-11-13T10:00:19.090,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active [ns_server:info,2012-11-13T10:00:19.090,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active [ns_server:info,2012-11-13T10:00:19.091,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active [ns_server:info,2012-11-13T10:00:19.091,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active [ns_server:info,2012-11-13T10:00:19.117,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active [ns_server:info,2012-11-13T10:00:19.118,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active [ns_server:info,2012-11-13T10:00:19.118,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active [ns_server:info,2012-11-13T10:00:19.119,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active [ns_server:info,2012-11-13T10:00:19.119,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active [ns_server:info,2012-11-13T10:00:19.120,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active [ns_server:info,2012-11-13T10:00:19.120,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active [ns_server:info,2012-11-13T10:00:19.121,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active [ns_server:info,2012-11-13T10:00:19.121,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active [ns_server:info,2012-11-13T10:00:19.122,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active 
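[Editor's note] The "Usable vbuckets" lines further below look like punctuation because of how they are printed, not because the data is corrupt: capi_set_view_manager keeps a plain Erlang list of vbucket ids, and Erlang's ~p formatter renders a list of printable character codes as a string. Ids 32..63 all fall in printable ASCII, so [63,62,61] comes out as "?>="; as soon as an id below 32 joins the set (vbucket 31, later in this log) the formatter falls back to plain integers, which is why the output eventually switches to lists like [48,32,51,...]. A quick shell session showing the effect:

    1> io:format("~p~n", [[63,62,61]]).
    "?>="
    ok
    2> io:format("~p~n", [[63,62,31]]).
    [63,62,31]
    ok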
[ns_server:info,2012-11-13T10:00:19.122,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active [ns_server:info,2012-11-13T10:00:19.123,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active [ns_server:info,2012-11-13T10:00:19.123,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active [ns_server:info,2012-11-13T10:00:19.125,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active [ns_server:debug,2012-11-13T10:00:19.230,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-month', [{name,'stats_archiver-default-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,974112},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,582,<0.1764.0>}} [ns_server:debug,2012-11-13T10:00:19.231,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-month', [{name,'stats_archiver-default-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829618,974112},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,582,<0.1764.0>}} [ns_server:debug,2012-11-13T10:00:19.232,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-default-year' [ns_server:info,2012-11-13T10:00:19.289,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active [ns_server:info,2012-11-13T10:00:19.289,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active [ns_server:info,2012-11-13T10:00:19.290,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active [ns_server:info,2012-11-13T10:00:19.291,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active [ns_server:info,2012-11-13T10:00:19.291,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active [ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active 
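[Editor's note] The Mnesia table events above show the stats archiver creating one disc-backed table per bucket and retention period ('stats_archiver-default-month', then '-year'), each an ordered_set of stat_entry records keyed by timestamp. A sketch of the create_table call implied by the logged schema (options taken from the event above; module and function names are illustrative, not ns_server's code):

    -module(stats_archive_sketch).
    -export([create_archive/1]).

    -record(stat_entry, {timestamp, values}).

    %% Matches the logged schema: ordered_set, a disc copy on this node only,
    %% local_content so each node archives its own stats without replication.
    create_archive(Table) ->
        mnesia:create_table(Table,
                            [{type, ordered_set},
                             {disc_copies, [node()]},
                             {local_content, true},
                             {record_name, stat_entry},
                             {attributes, record_info(fields, stat_entry)}]).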
[ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active [ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active [ns_server:info,2012-11-13T10:00:19.294,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active [ns_server:info,2012-11-13T10:00:19.294,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active [ns_server:info,2012-11-13T10:00:19.296,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active [ns_server:info,2012-11-13T10:00:19.296,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active [ns_server:info,2012-11-13T10:00:19.297,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default" [ns_server:info,2012-11-13T10:00:19.297,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 1 seconds [views:debug,2012-11-13T10:00:19.456,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:19.456,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?" [ns_server:debug,2012-11-13T10:00:19.456,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",63,active,0} [ns_server:debug,2012-11-13T10:00:19.567,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-year', [{name,'stats_archiver-default-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829619,232633},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,584,<0.1778.0>}} [ns_server:debug,2012-11-13T10:00:19.568,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:handle_info:222]Mnesia table event: {write,{schema,'stats_archiver-default-year', [{name,'stats_archiver-default-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1352,829619,232633},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,584,<0.1778.0>}} [views:debug,2012-11-13T10:00:19.827,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. 
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:19.827,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?>"
[ns_server:debug,2012-11-13T10:00:19.827,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",62,active,0}
[views:debug,2012-11-13T10:00:19.935,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:19.935,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",61,active,0}
[ns_server:debug,2012-11-13T10:00:19.935,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?>="
[views:debug,2012-11-13T10:00:20.106,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.106,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",60,active,0}
[ns_server:debug,2012-11-13T10:00:20.106,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "<?>="
[views:debug,2012-11-13T10:00:20.203,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.203,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",59,active,0}
[ns_server:debug,2012-11-13T10:00:20.203,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "<?;>="
[views:debug,2012-11-13T10:00:20.304,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.304,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "<?;>:="
[ns_server:debug,2012-11-13T10:00:20.304,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",58,active,0}
[views:debug,2012-11-13T10:00:20.404,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.405,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "9<?;>:="
[ns_server:debug,2012-11-13T10:00:20.405,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",57,active,0}
[views:debug,2012-11-13T10:00:20.505,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.505,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "9<?8;>:="
[ns_server:debug,2012-11-13T10:00:20.505,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",56,active,0}
[views:debug,2012-11-13T10:00:20.605,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.606,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "9<?8;>7:="
[ns_server:debug,2012-11-13T10:00:20.606,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",55,active,0}
[views:debug,2012-11-13T10:00:20.717,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.718,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "69<?8;>7:="
[ns_server:debug,2012-11-13T10:00:20.718,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",54,active,0}
[views:debug,2012-11-13T10:00:20.818,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.818,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "69<?58;>7:="
[ns_server:debug,2012-11-13T10:00:20.819,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",53,active,0}
[views:debug,2012-11-13T10:00:20.930,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:20.930,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "69<?58;>47:="
[ns_server:debug,2012-11-13T10:00:20.930,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",52,active,0}
[ns_server:info,2012-11-13T10:00:20.999,ns_1@127.0.0.1:ns_doctor<0.793.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"]
[views:debug,2012-11-13T10:00:21.044,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.045,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "369<?58;>47:="
[ns_server:debug,2012-11-13T10:00:21.045,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",51,active,0}
[views:debug,2012-11-13T10:00:21.166,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.167,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",50,active,0}
[ns_server:debug,2012-11-13T10:00:21.167,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "369<?258;>47:="
[views:debug,2012-11-13T10:00:21.295,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.295,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "369<?258;>147:="
[ns_server:debug,2012-11-13T10:00:21.295,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",49,active,0}
[views:debug,2012-11-13T10:00:21.487,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.487,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<?258;>147:="
[ns_server:debug,2012-11-13T10:00:21.487,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",48,active,0}
[views:debug,2012-11-13T10:00:21.625,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.625,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<?/258;>147:="
[ns_server:debug,2012-11-13T10:00:21.625,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",47,active,0}
[views:debug,2012-11-13T10:00:21.747,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.748,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<?/258;>.147:="
[ns_server:debug,2012-11-13T10:00:21.748,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",46,active,0}
[views:debug,2012-11-13T10:00:21.859,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:21.860,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<?/258;>.147:=-"
[ns_server:debug,2012-11-13T10:00:21.860,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",45,active,0}
[views:debug,2012-11-13T10:00:21.982,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44.
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:21.982,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;>.147:=-" [ns_server:debug,2012-11-13T10:00:21.983,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",44,active,0} [views:debug,2012-11-13T10:00:22.094,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.095,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;+>.147:=-" [ns_server:debug,2012-11-13T10:00:22.095,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",43,active,0} [views:debug,2012-11-13T10:00:22.206,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.206,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;+>.147:*=-" [ns_server:debug,2012-11-13T10:00:22.206,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",42,active,0} [views:debug,2012-11-13T10:00:22.324,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.324,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258;+>.147:*=-" [ns_server:debug,2012-11-13T10:00:22.324,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",41,active,0} [views:debug,2012-11-13T10:00:22.430,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.430,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258(;+>.147:*=-" [ns_server:debug,2012-11-13T10:00:22.430,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",40,active,0} [views:debug,2012-11-13T10:00:22.541,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.542,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258(;+>.147':*=-" [ns_server:debug,2012-11-13T10:00:22.542,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",39,active,0} [views:debug,2012-11-13T10:00:22.643,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.643,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/258(;+>.147':*=-" [ns_server:debug,2012-11-13T10:00:22.643,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",38,active,0} [views:debug,2012-11-13T10:00:22.754,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.755,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/25%8(;+>.147':*=-" [ns_server:debug,2012-11-13T10:00:22.755,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",37,active,0} [views:debug,2012-11-13T10:00:22.857,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.857,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/25%8(;+>.14$7':*=-" [ns_server:debug,2012-11-13T10:00:22.857,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",36,active,0} [views:debug,2012-11-13T10:00:22.947,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:22.947,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/25%8(;+>.14$7':*=-" [ns_server:debug,2012-11-13T10:00:22.947,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",35,active,0} [views:debug,2012-11-13T10:00:23.047,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.048,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/2\"5%8(;+>.14$7':*=-" [ns_server:debug,2012-11-13T10:00:23.048,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",34,active,0} [views:debug,2012-11-13T10:00:23.148,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.148,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/2\"5%8(;+>.1!4$7':*=-" [ns_server:debug,2012-11-13T10:00:23.148,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",33,active,0} [views:debug,2012-11-13T10:00:23.249,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.249,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0 3#6&9)<,?/2\"5%8(;+>.1!4$7':*=-" [ns_server:debug,2012-11-13T10:00:23.249,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",32,active,0} [views:debug,2012-11-13T10:00:23.349,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.350,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,49,33, 52,36,55,39,58,42,61,45] [ns_server:debug,2012-11-13T10:00:23.350,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",31,active,0} [ns_server:debug,2012-11-13T10:00:23.451,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 0 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:23.451,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.451,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",30,active,0} [ns_server:debug,2012-11-13T10:00:23.451,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,30,49, 33,52,36,55,39,58,42,61,45] [ns_server:debug,2012-11-13T10:00:23.452,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 1 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.453,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 2 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.453,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 3 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.564,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 4 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:23.564,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.564,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",29,active,0} [ns_server:debug,2012-11-13T10:00:23.565,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,30,49, 33,52,36,55,39,58,42,61,45,29] [ns_server:debug,2012-11-13T10:00:23.565,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 5 ({not_found,no_db_file}). 
Ignoring [ns_server:debug,2012-11-13T10:00:23.566,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 6 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.567,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 7 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.568,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 8 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:23.675,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.675,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 9 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.675,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",28,active,0} [ns_server:debug,2012-11-13T10:00:23.675,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,62,46,30, 49,33,52,36,55,39,58,42,61,45,29] [ns_server:debug,2012-11-13T10:00:23.677,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 10 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.678,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 11 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 12 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",27,active,0} [ns_server:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62,46, 30,49,33,52,36,55,39,58,42,61,45,29] [ns_server:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 13 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.788,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 14 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.789,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 15 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.887,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 16 ({not_found,no_db_file}). 
Ignoring [views:debug,2012-11-13T10:00:23.887,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:23.888,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",26,active,0} [ns_server:debug,2012-11-13T10:00:23.888,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62,46, 30,49,33,52,36,55,39,58,42,26,61,45,29] [ns_server:debug,2012-11-13T10:00:23.888,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 17 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.889,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 18 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:23.890,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 19 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:24.010,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 20 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:24.011,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.011,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",25,active,0} [ns_server:debug,2012-11-13T10:00:24.011,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62, 46,30,49,33,52,36,55,39,58,42,26,61,45,29] [ns_server:debug,2012-11-13T10:00:24.011,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 21 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:24.013,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 22 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:00:24.014,ns_1@127.0.0.1:couch_stats_reader-default<0.1713.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 23 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:00:24.111,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.111,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",24,active,0} [ns_server:debug,2012-11-13T10:00:24.111,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43,27, 62,46,30,49,33,52,36,55,39,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.212,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.212,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",23,active,0} [ns_server:debug,2012-11-13T10:00:24.212,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43,27, 62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.314,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.314,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",22,active,0} [ns_server:debug,2012-11-13T10:00:24.314,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43, 27,62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.437,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.438,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",21,active,0} [ns_server:debug,2012-11-13T10:00:24.438,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24,59, 43,27,62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.538,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.540,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",20,active,0} [ns_server:debug,2012-11-13T10:00:24.540,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24,59, 43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.650,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.650,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",19,active,0} [ns_server:debug,2012-11-13T10:00:24.650,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24, 59,43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.750,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.751,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",18,active,0} [ns_server:debug,2012-11-13T10:00:24.751,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56,40, 24,59,43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.863,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.863,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",17,active,0} [ns_server:debug,2012-11-13T10:00:24.863,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56,40, 24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:24.964,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:24.965,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",16,active,0} [ns_server:debug,2012-11-13T10:00:24.964,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56, 40,24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:25.075,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.076,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",15,active,0} [ns_server:debug,2012-11-13T10:00:25.076,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:25.181,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.182,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",14,active,0} [ns_server:debug,2012-11-13T10:00:25.181,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:00:25.288,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.289,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",13,active,0} [ns_server:debug,2012-11-13T10:00:25.288,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29, 13] [views:debug,2012-11-13T10:00:25.389,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.390,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",12,active,0} [ns_server:debug,2012-11-13T10:00:25.390,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45, 29,13] [views:debug,2012-11-13T10:00:25.534,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.535,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",11,active,0} [ns_server:debug,2012-11-13T10:00:25.535,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61, 45,29,13] [views:debug,2012-11-13T10:00:25.669,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.731,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",10,active,0} [ns_server:debug,2012-11-13T10:00:25.731,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:00:25.860,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.861,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",9,active,0} [ns_server:debug,2012-11-13T10:00:25.861,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:00:25.961,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:25.962,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",8,active,0} [ns_server:debug,2012-11-13T10:00:25.962,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:00:26.107,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.108,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",7,active,0} [ns_server:debug,2012-11-13T10:00:26.108,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58,42,26, 10,61,45,29,13] [views:debug,2012-11-13T10:00:26.308,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.309,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",6,active,0} [ns_server:debug,2012-11-13T10:00:26.309,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58,42, 26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.420,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.421,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",5,active,0} [ns_server:debug,2012-11-13T10:00:26.421,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58, 42,26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.532,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.533,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",4,active,0} [ns_server:debug,2012-11-13T10:00:26.533,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23,58, 42,26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.633,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.634,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",3,active,0} [ns_server:debug,2012-11-13T10:00:26.634,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23, 58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.734,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.734,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",2,active,0} [ns_server:debug,2012-11-13T10:00:26.734,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23, 58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.846,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (0) [ns_server:debug,2012-11-13T10:00:26.846,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",1,active,0} [ns_server:debug,2012-11-13T10:00:26.846,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55,39, 23,58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:00:26.935,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. 
Updated state: active (0)
[ns_server:debug,2012-11-13T10:00:26.936,ns_1@127.0.0.1:<0.1704.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",0,active,0}
[ns_server:debug,2012-11-13T10:00:26.936,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,0,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2,18,53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55,39,23,58,42,26,10,61,45,29,13]
[ns_server:debug,2012-11-13T10:00:36.076,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:00:36.077,ns_1@127.0.0.1:<0.2255.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:00:36.111,ns_1@127.0.0.1:<0.2255.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:00:36.119,ns_1@127.0.0.1:<0.2258.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 535470
[ns_server:debug,2012-11-13T10:00:36.120,ns_1@127.0.0.1:<0.2258.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:00:36.120,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:00:36.120,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:01:06.121,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:01:06.122,ns_1@127.0.0.1:<0.2388.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:01:06.124,ns_1@127.0.0.1:<0.2388.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:01:06.132,ns_1@127.0.0.1:<0.2391.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 535470
[ns_server:debug,2012-11-13T10:01:06.132,ns_1@127.0.0.1:<0.2391.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:01:06.133,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:01:06.133,ns_1@127.0.0.1:compaction_daemon<0.871.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:01:13.372,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:01:13.372,ns_1@127.0.0.1:ns_config_rep<0.778.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([settings]..)
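[Editor's note] Both compaction passes above reach the same conclusion: with 6045 bytes of live data in a 535470-byte file the bucket is badly fragmented in relative terms, but the file is still far below the 8519680-byte floor, so compacting would reclaim almost nothing and the daemon skips it and re-arms a 30-second timer. A sketch of that decision under the logged thresholds (not the actual compaction_daemon code):

    -module(compaction_sketch).
    -export([file_needs_compaction/4]).

    %% Skip files under the minimum size outright; otherwise compare the
    %% fragmentation ratio (share of the file not holding live data) to
    %% the threshold from {database_fragmentation_threshold,{30,undefined}}.
    file_needs_compaction(DataSize, FileSize, MinFileSize, FragThreshold) ->
        FileSize >= MinFileSize andalso
            (FileSize - DataSize) * 100 div FileSize >= FragThreshold.

Plugging in the logged numbers, compaction_sketch:file_needs_compaction(6045, 535470, 8519680, 30) returns false: the size check short-circuits, so the fragmentation ratio (about 99% here) is never even consulted.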
[ns_server:debug,2012-11-13T10:01:13.374,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:log_common:111]config change: settings -> [{stats,[{send_stats,false}]}]
[ns_server:debug,2012-11-13T10:01:13.374,ns_1@127.0.0.1:capi_set_view_manager-default<0.1687.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.872.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.871.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.876.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.875.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.874.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.873.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.869.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.868.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.865.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.864.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.863.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.862.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.321,ns_1@127.0.0.1:<0.1685.0>:single_bucket_sup:top_loop:28]Delegating exit {'EXIT',<0.859.0>,shutdown} to child supervisor: <0.1686.0>
[ns_server:debug,2012-11-13T10:01:24.322,ns_1@127.0.0.1:<0.1717.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.1716.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.322,ns_1@127.0.0.1:<0.1715.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.1714.0>} exited with reason shutdown
[user:info,2012-11-13T10:01:24.323,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown
[ns_server:info,2012-11-13T10:01:24.323,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:terminate:672]This bucket shutdown is not due to bucket deletion.
Doing nothing
[ns_server:debug,2012-11-13T10:01:24.324,ns_1@127.0.0.1:<0.1701.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.1687.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.324,ns_1@127.0.0.1:<0.1688.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.1687.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.324,ns_1@127.0.0.1:<0.1702.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {mc_couch_events,<0.1687.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.324,ns_1@127.0.0.1:<0.1685.0>:single_bucket_sup:top_loop:24]per-bucket supervisor for "default" died with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.325,ns_1@127.0.0.1:<0.861.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.859.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:24.325,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:terminate:143]Sending 'shutdown' to port
[ns_server:debug,2012-11-13T10:01:24.325,ns_1@127.0.0.1:<0.847.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.845.0>} exited with reason killed
[error_logger:error,2012-11-13T10:01:24.326,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
     Supervisor: {local,ns_bucket_sup}
     Context:    shutdown_error
     Reason:     normal
     Offender:   [{pid,<0.861.0>},
                  {name,buckets_observing_subscription},
                  {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}},
                  {restart_type,permanent},
                  {shutdown,1000},
                  {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:24.524,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:140]Ignoring failure to grab system stats:
{'EXIT',{noproc,{gen_server,call,
                 [{'stats_reader-@system','ns_1@127.0.0.1'},
                  {latest,"minute"}]}}}
[ns_server:debug,2012-11-13T10:01:24.524,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default":
{'EXIT',{noproc,{gen_server,call,
                 [{'stats_reader-default','ns_1@127.0.0.1'},
                  {latest,minute}]}}}
[ns_server:debug,2012-11-13T10:01:24.525,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos
{exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}},
 [{gen_server,call,3},
  {xdc_replication_sup,all_local_replication_infos,0},
  {ns_heart,grab_local_xdcr_replications,0},
  {ns_heart,current_status,0},
  {ns_heart,handle_info,2},
  {gen_server,handle_msg,5},
  {proc_lib,init_p_do_apply,3}]}
[ns_server:info,2012-11-13T10:01:24.526,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:log:171]memcached<0.844.0>: EOL on stdin. Initiating shutdown
memcached<0.844.0>: Tue Nov 13 10:01:24.491710 PST 3: Shutting down tap connections!
memcached<0.844.0>: Tue Nov 13 10:01:24.492891 PST 3: Had to wait 1081 usec for shutdown [ns_server:error,2012-11-13T10:01:24.526,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:info,2012-11-13T10:01:24.533,ns_1@127.0.0.1:<0.1704.0>:mc_connection:run_loop:202]mccouch connection was normally closed [ns_server:info,2012-11-13T10:01:24.533,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0 [ns_server:debug,2012-11-13T10:01:24.534,ns_1@127.0.0.1:<0.842.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [ns_server:info,2012-11-13T10:01:24.534,ns_1@127.0.0.1:<0.842.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:01:24.535,ns_1@127.0.0.1:<0.842.0>:ns_port_server:log:171]moxi<0.842.0>: EOL on stdin. Exiting [ns_server:info,2012-11-13T10:01:24.536,ns_1@127.0.0.1:mb_master<0.799.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup [ns_server:debug,2012-11-13T10:01:24.535,ns_1@127.0.0.1:<0.813.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {master_activity_events,<0.812.0>} exited with reason killed [ns_server:debug,2012-11-13T10:01:24.536,ns_1@127.0.0.1:<0.800.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.799.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.536,ns_1@127.0.0.1:<0.794.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.793.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.552,ns_1@127.0.0.1:<0.792.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {buckets_events,<0.791.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.552,ns_1@127.0.0.1:<0.782.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.781.0>} exited with reason killed [ns_server:debug,2012-11-13T10:01:24.552,ns_1@127.0.0.1:<0.779.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events_local,<0.778.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.552,ns_1@127.0.0.1:<0.769.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.767.0>} exited with reason killed [ns_server:debug,2012-11-13T10:01:24.654,ns_1@127.0.0.1:<0.763.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.762.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.654,ns_1@127.0.0.1:<0.758.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.757.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:01:24.654,ns_1@127.0.0.1:<0.761.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.760.0>} exited with reason shutdown [error_logger:error,2012-11-13T10:01:24.655,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.764.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, 
[vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:24.654,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:wait_saver:539]Waited for saver done. State= {config, {full, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", undefined,ns_config_default}, [[], [{xdcr_checkpoint_interval,1800}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_worker_batch_size,100}, {xdcr_connection_timeout,60}, {xdcr_num_worker_process,4}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {directory, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,2391}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, 
"-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, 
{min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {memory_quota,2391}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} [ns_server:info,2012-11-13T10:01:24.830,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting. 
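Note: the port_servers specs inside this saved config double as documentation for how ns_server assembles the moxi and memcached command lines: each argv element is either a literal string or a {FormatString, Keys} pair whose keys (port, dedicated_port, verbosity, ...) are resolved from the node's config before being rendered with the ~B/~s format directives, and some entries resolve via an {M,F,A} call such as {misc,this_node_rest_port,[]}. A rough Erlang sketch of that expansion against a flat proplist follows; expand_args/2 is an assumed name rather than ns_server's actual function, and the {M,F,A} form is deliberately left out for brevity.

%% port_args.erl -- illustrative sketch, not ns_server's real expander.
-module(port_args).
-export([expand_args/2]).

%% Literal strings pass through untouched; {Fmt, Keys} pairs are rendered
%% with io_lib:format after looking each key up in the config proplist.
expand_args(Args, Config) ->
    [expand_arg(A, Config) || A <- Args].

expand_arg({Fmt, Keys}, Config) ->
    Values = [proplists:get_value(K, Config) || K <- Keys],
    lists:flatten(io_lib:format(Fmt, Values));
expand_arg(Arg, _Config) when is_list(Arg) ->
    Arg.

%% port_args:expand_args(["-p", {"~B",[port]}], [{port,11210}])
%% -> ["-p","11210"], the same substitution the memcached spec above relies on.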
[error_logger:info,2012-11-13T10:01:24.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
    application: mnesia
    exited: stopped
    type: temporary
[error_logger:error,2012-11-13T10:01:24.856,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.614.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.614.0>,<0.615.0>,nil,<<"1352829393849668">>, <0.611.0>,<0.616.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.611.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.611.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.611.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.861,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.614.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 987
    stack_size: 24
    reductions: 241
  neighbours:
[error_logger:error,2012-11-13T10:01:24.862,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1905.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1905.0>,<0.1906.0>,nil,<<"1352829621747368">>, <0.1902.0>,<0.1907.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1902.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1902.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1902.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/46">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1905.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 573
  neighbours:
[error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.608.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.608.0>,<0.609.0>,nil,<<"1352829393812330">>, <0.605.0>,<0.610.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.605.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.605.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.605.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.608.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 610
    stack_size: 24
    reductions: 328
  neighbours:
[error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1967.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1967.0>,<0.1968.0>,nil,<<"1352829622857071">>, <0.1964.0>,<0.1969.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1964.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1964.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1964.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/36">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1967.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 565
  neighbours:
[error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1858.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1858.0>,<0.1859.0>,nil,<<"1352829620929634">>, <0.1855.0>,<0.1860.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1855.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1855.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1855.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/52">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1858.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 584
  neighbours:
[error_logger:error,2012-11-13T10:01:24.896,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2079.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2079.0>,<0.2080.0>,nil,<<"1352829624750495">>, <0.2076.0>,<0.2081.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2076.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2076.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2076.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/18">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2079.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.901,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2055.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2055.0>,<0.2056.0>,nil,<<"1352829624313759">>, <0.2052.0>,<0.2057.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2052.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2052.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2052.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/22">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.905,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2055.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2085.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2085.0>,<0.2086.0>,nil,<<"1352829624862447">>, <0.2082.0>,<0.2087.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2082.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2082.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2082.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/17">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2085.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2121.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2121.0>,<0.2122.0>,nil,<<"1352829625534387">>, <0.2118.0>,<0.2123.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2118.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2118.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2118.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/11">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.916,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2121.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 616
  neighbours:
[error_logger:error,2012-11-13T10:01:24.917,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2176.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2176.0>,<0.2177.0>,nil,<<"1352829626420340">>, <0.2173.0>,<0.2178.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2173.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2173.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2173.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/5">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2176.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 611
  neighbours:
[error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2170.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2170.0>,<0.2171.0>,nil,<<"1352829626308033">>, <0.2167.0>,<0.2172.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2167.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2167.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2167.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/6">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.925,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2170.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 619
  neighbours:
[error_logger:error,2012-11-13T10:01:24.927,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_file:spawn_writer/2
    pid: <0.1693.0>
    registered_name: []
    exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.1693.0>}, infinity]}}
      in function  gen_server:call/3
      in call from couch_file:writer_loop/4
    ancestors: [<0.1690.0>,couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 987
    stack_size: 24
    reductions: 863
  neighbours:
[error_logger:error,2012-11-13T10:01:24.928,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2139.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2139.0>,<0.2140.0>,nil,<<"1352829625961162">>, <0.2136.0>,<0.2141.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2136.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2136.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2136.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/8">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2139.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 644
  neighbours:
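Note: every crash report in this run has the same shape: a couch_db gen_server running with trap_exit: true sees {'EXIT',<0.604.0>,killed} from its parent couch_server, terminates with that reason, and because killed is not a normal/shutdown reason OTP logs both the "Generic server ... terminating" message and a CRASH REPORT; nothing here indicates on-disk corruption, only that couch_server was killed before its open databases were closed gracefully. A minimal gen_server reproducing the pattern (an illustrative module, not couch_db itself; note that gen_server intercepts exits from the actual parent and calls terminate/2 directly, while exits from other linked processes arrive via handle_info):

%% trap_demo.erl -- illustrative sketch only.
-module(trap_demo).
-behaviour(gen_server).
-export([start_link/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).

start_link() -> gen_server:start_link(?MODULE, [], []).

init([]) ->
    process_flag(trap_exit, true),  %% matches "trap_exit: true" in the reports
    {ok, #{}}.

handle_call(_Req, _From, State) -> {reply, ok, State}.
handle_cast(_Msg, State) -> {noreply, State}.

%% A trapped exit from a non-parent linked process lands here as a message;
%% stopping with the sender's reason yields the same termination report.
handle_info({'EXIT', _From, Reason}, State) ->
    {stop, Reason, State}.

terminate(_Reason, _State) ->
    ok.  %% couch_db would close its file handles and btrees here

%% {ok, P} = trap_demo:start_link(), exit(P, killed).
%% logs "** Generic server ... terminating ... Reason for termination == killed".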
[error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2182.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2182.0>,<0.2183.0>,nil,<<"1352829626532283">>, <0.2179.0>,<0.2184.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2179.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2179.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2179.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/4">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.937,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2182.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 539
  neighbours:
[error_logger:error,2012-11-13T10:01:24.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1949.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1949.0>,<0.1950.0>,nil,<<"1352829622541445">>, <0.1946.0>,<0.1951.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1946.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1946.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1946.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/39">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.942,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1949.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 673
  neighbours:
[error_logger:error,2012-11-13T10:01:24.943,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1864.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1864.0>,<0.1865.0>,nil,<<"1352829621044192">>, <0.1861.0>,<0.1876.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1861.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1861.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1861.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/51">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.946,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1864.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 668
  neighbours:
[error_logger:error,2012-11-13T10:01:24.947,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2097.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2097.0>,<0.2098.0>,nil,<<"1352829625075442">>, <0.2094.0>,<0.2099.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2094.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2094.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2094.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/15">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.950,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1887.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1887.0>,<0.1888.0>,nil,<<"1352829621295039">>, <0.1884.0>,<0.1889.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1884.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1884.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1884.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/49">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.953,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2097.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 676
  neighbours:
[error_logger:error,2012-11-13T10:01:24.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1816.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1816.0>,<0.1817.0>,nil,<<"1352829620203018">>, <0.1813.0>,<0.1818.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1813.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1813.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1813.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/59">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1887.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 693
  neighbours:
[error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1816.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 688
  neighbours:
[error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1880.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1880.0>,<0.1881.0>,nil,<<"1352829621166446">>, <0.1877.0>,<0.1883.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1877.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/50">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1880.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:info,2012-11-13T10:01:24.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
    application: mapreduce
    exited: stopped
    type: temporary
[error_logger:error,2012-11-13T10:01:24.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1955.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1955.0>,<0.1956.0>,nil,<<"1352829622642724">>, <0.1952.0>,<0.1957.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1952.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1952.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1952.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/38">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.968,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1955.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 725
  neighbours:
[error_logger:error,2012-11-13T10:01:24.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1822.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1822.0>,<0.1823.0>,nil,<<"1352829620303621">>, <0.1819.0>,<0.1824.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1819.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1819.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1819.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/58">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.972,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2043.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2043.0>,<0.2044.0>,nil,<<"1352829624110888">>, <0.2040.0>,<0.2045.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2040.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2040.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2040.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/24">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.976,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1822.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 720
  neighbours:
[error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2043.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 724
  neighbours:
[error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2091.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2091.0>,<0.2092.0>,nil,<<"1352829624963872">>, <0.2088.0>,<0.2093.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2088.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2088.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2088.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/16">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2091.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 736
  neighbours:
[error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2015.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2015.0>,<0.2016.0>,nil,<<"1352829623674489">>, <0.2012.0>,<0.2017.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2012.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2012.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2012.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/28">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.986,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2015.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 744
  neighbours:
[error_logger:error,2012-11-13T10:01:24.987,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1840.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1840.0>,<0.1841.0>,nil,<<"1352829620605270">>, <0.1837.0>,<0.1842.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1837.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1837.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1837.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/55">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.990,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1840.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 752
  neighbours:
[error_logger:info,2012-11-13T10:01:24.991,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
    application: couch_view_parser
    exited: stopped
    type: temporary
[error_logger:error,2012-11-13T10:01:25.114,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1846.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1846.0>,<0.1847.0>,nil,<<"1352829620717440">>, <0.1843.0>,<0.1848.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1843.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1843.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1843.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/54">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1846.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 764
  neighbours:
[error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2200.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2200.0>,<0.2201.0>,nil,<<"1352829626845747">>, <0.2197.0>,<0.2202.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2197.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2197.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2197.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/1">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.123,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2200.0>
    registered_name: []
    exception exit: killed
      in function  gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 759
  neighbours:
[error_logger:error,2012-11-13T10:01:25.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1852.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When
Server state == {db,<0.1852.0>,<0.1853.0>,nil,<<"1352829620818124">>, <0.1849.0>,<0.1854.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1849.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1849.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1849.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/53">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.128,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1852.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 780 neighbours: [error_logger:error,2012-11-13T10:01:25.129,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1828.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1828.0>,<0.1829.0>,nil,<<"1352829620404369">>, <0.1825.0>,<0.1830.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1825.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1825.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1825.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/57">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:info,2012-11-13T10:01:25.132,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: couch_index_merger exited: stopped type: temporary [error_logger:error,2012-11-13T10:01:25.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1828.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 788 neighbours: [error_logger:info,2012-11-13T10:01:25.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================INFO REPORT========================= application: couch_set_view exited: stopped type: temporary [error_logger:error,2012-11-13T10:01:25.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2049.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2049.0>,<0.2050.0>,nil,<<"1352829624211604">>, <0.2046.0>,<0.2051.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2046.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2046.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2046.0>, 
{4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/23">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.138,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2049.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 791 neighbours: [error_logger:info,2012-11-13T10:01:25.139,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T10:01:25.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_set_view started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T10:01:25.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: 'ns_1@127.0.0.1' [error_logger:error,2012-11-13T10:01:25.141,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1802.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1802.0>,<0.1803.0>,nil,<<"1352829619934967">>, <0.1799.0>,<0.1804.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1799.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/61">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.144,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1802.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 824 neighbours: [error_logger:error,2012-11-13T10:01:25.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2009.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2009.0>,<0.2010.0>,nil,<<"1352829623563532">>, <0.2006.0>,<0.2011.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2006.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2006.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2006.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/29">>, 
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:info,2012-11-13T10:01:25.148,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mapreduce started_at: 'ns_1@127.0.0.1' [error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2009.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 832 neighbours: [error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1834.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1834.0>,<0.1835.0>,nil,<<"1352829620504957">>, <0.1831.0>,<0.1836.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1831.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1831.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1831.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/56">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1834.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 844 neighbours: [error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2133.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2133.0>,<0.2134.0>,nil,<<"1352829625860252">>, <0.2130.0>,<0.2135.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2130.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2130.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2130.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/9">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2133.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 852 neighbours: [error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2003.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2003.0>,<0.2004.0>,nil,<<"1352829623450371">>, <0.2000.0>,<0.2005.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2000.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2000.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2000.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/30">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2003.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 860 neighbours: [error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1925.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1925.0>,<0.1926.0>,nil,<<"1352829622094265">>, <0.1922.0>,<0.1927.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1922.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1922.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1922.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/43">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.164,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1808.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1808.0>,<0.1809.0>,nil,<<"1352829620105814">>, <0.1805.0>,<0.1812.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1805.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/60">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/60.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1925.0> registered_name: [] exception exit: killed in function 
gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 881 neighbours: [error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1808.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2109.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2109.0>,<0.2110.0>,nil,<<"1352829625288026">>, <0.2106.0>,<0.2111.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2106.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2106.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2106.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/13">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2109.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1979.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1979.0>,<0.1980.0>,nil,<<"1352829623047373">>, <0.1976.0>,<0.1981.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1976.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1976.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1976.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/34">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1979.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 
573 neighbours: [error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1899.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1899.0>,<0.1900.0>,nil,<<"1352829621624691">>, <0.1896.0>,<0.1901.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1896.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1896.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1896.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/47">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.180,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1961.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1961.0>,<0.1962.0>,nil,<<"1352829622754407">>, <0.1958.0>,<0.1963.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1958.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1958.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1958.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/37">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1899.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 581 neighbours: [error_logger:error,2012-11-13T10:01:25.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1961.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 585 neighbours: [error_logger:error,2012-11-13T10:01:25.186,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2127.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2127.0>,<0.2128.0>,nil,<<"1352829625668919">>, <0.2124.0>,<0.2129.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2124.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2124.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2124.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/10">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** 
Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.189,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2127.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 584 neighbours: [error_logger:error,2012-11-13T10:01:25.190,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2021.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2021.0>,<0.2022.0>,nil,<<"1352829623786335">>, <0.2018.0>,<0.2023.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2018.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2018.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2018.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/27">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.193,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2021.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.194,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1937.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1937.0>,<0.1938.0>,nil,<<"1352829622323724">>, <0.1934.0>,<0.1939.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1934.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1934.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1934.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/41">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.197,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1937.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 565 neighbours: [error_logger:error,2012-11-13T10:01:25.198,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2073.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When 
Server state == {db,<0.2073.0>,<0.2074.0>,nil,<<"1352829624649630">>, <0.2070.0>,<0.2075.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.201,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2073.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 940 neighbours: [error_logger:error,2012-11-13T10:01:25.202,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2067.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2067.0>,<0.2068.0>,nil,<<"1352829624537966">>, <0.2064.0>,<0.2069.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2067.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1796.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1796.0>,<0.1797.0>,nil,<<"1352829619826563">>, <0.1793.0>,<0.1798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: 
initial call: couch_db:init/1 pid: <0.1796.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 956 neighbours: [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2206.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2206.0>,<0.2207.0>,nil,<<"1352829626935149">>, <0.2203.0>,<0.2208.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2206.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 951 neighbours: [error_logger:error,2012-11-13T10:01:25.215,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2115.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2115.0>,<0.2116.0>,nil,<<"1352829625388570">>, <0.2112.0>,<0.2117.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2112.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.218,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2115.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 972 neighbours: [error_logger:error,2012-11-13T10:01:25.219,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1973.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1973.0>,<0.1974.0>,nil,<<"1352829622946642">>, <0.1970.0>,<0.1975.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1970.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.222,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1973.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 993 neighbours: [error_logger:error,2012-11-13T10:01:25.223,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2188.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2188.0>,<0.2189.0>,nil,<<"1352829626632806">>, <0.2185.0>,<0.2190.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.227,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2188.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 975 neighbours: [error_logger:error,2012-11-13T10:01:25.228,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2145.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2145.0>,<0.2146.0>,nil,<<"1352829626107228">>, <0.2142.0>,<0.2162.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.231,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2145.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] 
messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.232,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1931.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1931.0>,<0.1932.0>,nil,<<"1352829622206092">>, <0.1928.0>,<0.1933.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.235,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1931.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 569 neighbours: [error_logger:error,2012-11-13T10:01:25.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2103.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2103.0>,<0.2104.0>,nil,<<"1352829625181072">>, <0.2100.0>,<0.2105.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.239,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2103.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 564 neighbours: [error_logger:error,2012-11-13T10:01:25.240,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1911.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1911.0>,<0.1912.0>,nil,<<"1352829621859289">>, <0.1908.0>,<0.1913.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.243,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1911.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1033 neighbours: [error_logger:error,2012-11-13T10:01:25.244,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1997.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1997.0>,<0.1998.0>,nil,<<"1352829623349426">>, <0.1994.0>,<0.1999.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.247,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1997.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1028 neighbours: [error_logger:error,2012-11-13T10:01:25.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2033.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2033.0>,<0.2034.0>,nil,<<"1352829624009960">>, <0.2030.0>,<0.2039.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2033.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1036 neighbours: 
[error_logger:error,2012-11-13T10:01:25.252,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1893.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1893.0>,<0.1894.0>,nil,<<"1352829621487024">>, <0.1890.0>,<0.1895.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1893.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1057 neighbours: [error_logger:error,2012-11-13T10:01:25.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1991.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1991.0>,<0.1992.0>,nil,<<"1352829623248968">>, <0.1988.0>,<0.1993.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.259,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1991.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1052 neighbours: [error_logger:error,2012-11-13T10:01:25.260,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2027.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2027.0>,<0.2028.0>,nil,<<"1352829623887061">>, <0.2024.0>,<0.2029.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:25.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1694.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1694.0>,<0.1695.0>,nil,<<"1352829618011823">>, <0.1690.0>,<0.1700.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2027.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1060 neighbours: [error_logger:error,2012-11-13T10:01:25.268,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 855 neighbours: [error_logger:error,2012-11-13T10:01:25.269,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1943.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1943.0>,<0.1944.0>,nil,<<"1352829622429697">>, <0.1940.0>,<0.1945.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.272,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1943.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1089 neighbours: [error_logger:error,2012-11-13T10:01:25.273,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1917.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == 
{db,<0.1917.0>,<0.1918.0>,nil,<<"1352829621982184">>, <0.1914.0>,<0.1921.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.276,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1917.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1097 neighbours: [error_logger:error,2012-11-13T10:01:25.277,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2194.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2194.0>,<0.2195.0>,nil,<<"1352829626733573">>, <0.2191.0>,<0.2196.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1786.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1786.0>,<0.1787.0>,nil,<<"1352829619455301">>, <0.1783.0>,<0.1790.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2194.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 539 neighbours: [error_logger:error,2012-11-13T10:01:25.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: 
couch_db:init/1 pid: <0.1786.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1985.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1985.0>,<0.1986.0>,nil,<<"1352829623147982">>, <0.1982.0>,<0.1987.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1985.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2061.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2061.0>,<0.2062.0>,nil,<<"1352829624436684">>, <0.2058.0>,<0.2063.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2058.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2061.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:info,2012-11-13T10:01:25.294,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.2479.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.2479.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.295,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2482.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:25.297,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2483.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.298,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2484.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.298,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2485.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.766,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2486.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.767,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2889.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.768,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2890.0>}, {name,couch_replication_event}, 
{mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.769,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2891.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:25.771,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2892.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.772,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2895.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.774,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2896.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.775,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.2481.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:25.776,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2898.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:25.788,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2899.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.789,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2901.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
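The repeated "Generic server ... terminating" and CRASH REPORT entries above come from couch_db processes (one per vbucket file such as default/44, default/2, default/63) that trap exits and are taken down with reason killed when their parent tears the tree down; the PROGRESS REPORTs that follow show couch_server_sup rebuilding the same services. A minimal sketch of a server that would log exactly this shape of report when a linked process is killed (illustrative module, not the actual couch_db source):

    -module(example_db_server).
    -behaviour(gen_server).
    -export([start_link/0, init/1, handle_call/3, handle_cast/2,
             handle_info/2, terminate/2, code_change/3]).

    start_link() -> gen_server:start_link(?MODULE, [], []).

    init([]) ->
        %% trap_exit: true (as in the crash reports above) turns the
        %% 'killed' exit of a linked pid into an 'EXIT' message.
        process_flag(trap_exit, true),
        {ok, []}.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.

    %% Mirrors "Last message in was {'EXIT',<0.604.0>,killed}":
    %% stopping with the received reason makes gen_server emit the
    %% termination report and crash report seen above.
    handle_info({'EXIT', _From, Reason}, State) ->
        {stop, Reason, State}.

    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.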
[error_logger:info,2012-11-13T10:01:25.790,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2903.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.791,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2905.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.793,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2906.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.793,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2908.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.794,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2910.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.795,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2912.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.796,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.2929.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.797,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.2897.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:25.799,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.2480.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, 
["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:error,2012-11-13T10:01:25.800,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.2892.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.802,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2931.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:25.799,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{10,1,25}}}, {memory, [{total,32386488}, {processes,12537488}, {processes_used,12447728}, {system,19849000}, {atom,1397745}, {atom_used,1366130}, {binary,672328}, {code,13855624}, {ets,1928984}]}, {loaded, [capi_frontend,capi_spatial,qlc,mb_map,ns_rebalancer, ns_janitor_map_recoverer,stats_collector, couch_stats_reader,ns_vbm_sup,ns_vbm_new_sup, tap_replication_manager,mc_connection, capi_ddoc_replication_srv,capi_set_view_manager,mc_binary, single_bucket_sup,janitor_agent,mc_client_binary, ns_janitor,menelaus_web_remote_clusters,lib,mochinum, capi_utils,mochiweb_mime,mochiweb_io,mb_grid,mochijson2, set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, ns_config_isasl_sync,ns_config_replica,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, 
mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set 
views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,560}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']}, {cookie,bptrojzpwfmfrqou}, {wordsize,8}, {wall_clock,312}] [ns_server:info,2012-11-13T10:01:25.826,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:info,2012-11-13T10:01:25.829,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2934.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.831,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2935.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.832,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2936.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.833,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.2938.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.834,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.2948.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2950.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2951.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2952.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.839,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2953.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:27.243,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2954.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:27.244,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:440]Using existing disk schema on ['ns_1@127.0.0.1']. 
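Each PROGRESS REPORT in this section is a supervisor announcing one started child, and the printed fields map one-to-one onto the child specification the supervisor returned from its init/1. A sketch of a spec that would produce a report like the couch_log entry earlier (the supervisor name and restart intensity here are assumptions, not the real couch_primary_sup values):

    -module(example_sup).
    -behaviour(supervisor).
    -export([start_link/0, init/1]).

    start_link() ->
        supervisor:start_link({local, ?MODULE}, ?MODULE, []).

    init([]) ->
        %% {Name, {M,F,A}, RestartType, Shutdown, ChildType, Modules}
        %% corresponds to the name/mfargs/restart_type/shutdown/
        %% child_type fields printed in each PROGRESS REPORT above.
        {ok, {{one_for_one, 10, 3600},
              [{couch_log, {couch_log, start_link, []},
                permanent, brutal_kill, worker, [couch_log]}]}}.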
[ns_server:debug,2012-11-13T10:01:27.245,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-year' [error_logger:info,2012-11-13T10:01:27.246,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2986.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:27.246,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-month' [ns_server:debug,2012-11-13T10:01:27.247,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-week' [ns_server:debug,2012-11-13T10:01:27.247,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-day' [error_logger:info,2012-11-13T10:01:27.247,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2987.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:27.248,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-hour' [ns_server:debug,2012-11-13T10:01:27.248,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-minute' [error_logger:info,2012-11-13T10:01:27.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2988.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:27.248,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-week' [ns_server:debug,2012-11-13T10:01:27.249,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-day' [ns_server:debug,2012-11-13T10:01:27.249,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-month' [ns_server:debug,2012-11-13T10:01:27.250,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of local_config [error_logger:info,2012-11-13T10:01:27.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.2989.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:27.250,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-minute' [ns_server:debug,2012-11-13T10:01:27.250,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of cluster [ns_server:debug,2012-11-13T10:01:27.251,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-year' 
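The mb_mnesia lines above ("Using existing disk schema", "Have local copy of ...") record two checks made during Mnesia startup: the schema is already stored on disc, and each stats_archiver table already has a replica on this node. A hedged sketch of what such checks look like in plain Mnesia terms (assumed logic, not the actual mb_mnesia:ensure_schema code):

    %% True when the schema is kept in disc_copies on this node,
    %% i.e. the "Using existing disk schema" case in the log.
    has_disk_schema() ->
        mnesia:table_info(schema, storage_type) =:= disc_copies.

    %% True when this node already holds a disc copy of Table,
    %% i.e. the "Have local copy of Table" case in the log.
    have_local_copy(Table) ->
        lists:member(node(), mnesia:table_info(Table, disc_copies)).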
[error_logger:info,2012-11-13T10:01:27.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.2949.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:27.251,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-hour' [error_logger:info,2012-11-13T10:01:27.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:28.521,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:init:268]Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,['stats_archiver-default-minute','stats_archiver-default-year', 'stats_archiver-default-week','stats_archiver-default-month', 'stats_archiver-default-hour','stats_archiver-default-day', 'stats_archiver-@system-week','stats_archiver-@system-minute', 'stats_archiver-@system-month','stats_archiver-@system-day', 'stats_archiver-@system-hour',local_config, 'stats_archiver-@system-year',cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{8,0}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<0.2948.0>,<0.2939.0>]}, {tables,['stats_archiver-default-year','stats_archiver-default-month', 'stats_archiver-default-week','stats_archiver-default-day', 'stats_archiver-default-hour','stats_archiver-default-minute', 'stats_archiver-@system-week','stats_archiver-@system-day', 'stats_archiver-@system-month',local_config, 'stats_archiver-@system-minute',cluster, 'stats_archiver-@system-year','stats_archiver-@system-hour',schema]}, {transaction_commits,2}, {transaction_failures,14}, {transaction_log_writes,0}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.5"}] Peers: ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T10:01:28.524,ns_1@127.0.0.1:ns_config_sup<0.3077.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:28.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.2939.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] 
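The "Current config:" block dumped by mb_mnesia above is essentially Mnesia's runtime parameter list; the same data can be read from any live node with mnesia:system_info/1, and mnesia:system_info(all) returns the full proplist at once. For example:

    %% Collect a few of the parameters shown in the dump above.
    mnesia_summary() ->
        [{Key, mnesia:system_info(Key)}
         || Key <- [db_nodes, running_db_nodes, directory,
                    local_tables, schema_location, version]].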
[error_logger:info,2012-11-13T10:01:28.525,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2937.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:28.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3078.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:28.526,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:28.527,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3079.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:28.528,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2012-11-13T10:01:28.529,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:695]Here's full dynamic config we loaded: [[{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, 
{alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {memory_quota,2391}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}]] [ns_server:info,2012-11-13T10:01:28.541,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {memory_quota,2391}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, 
{cookie,bptrojzpwfmfrqou}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}] [ns_server:debug,2012-11-13T10:01:28.553,ns_1@127.0.0.1:ns_config_isasl_sync<0.3083.0>:ns_config_isasl_sync:init:53]isasl_sync init: ["/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw", "_admin","_admin"] [error_logger:info,2012-11-13T10:01:28.554,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3080.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:28.554,ns_1@127.0.0.1:ns_config_isasl_sync<0.3083.0>:ns_config_isasl_sync:init:61]isasl_sync init buckets: ["default"] [ns_server:debug,2012-11-13T10:01:28.555,ns_1@127.0.0.1:ns_config_isasl_sync<0.3083.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [error_logger:info,2012-11-13T10:01:28.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3082.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:28.557,ns_1@127.0.0.1:ns_config_isasl_sync<0.3083.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[error_logger:info,2012-11-13T10:01:29.560,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3083.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.562,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3086.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3088.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3077.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.565,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3090.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.570,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3092.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.571,ns_1@127.0.0.1:ns_node_disco<0.3098.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [error_logger:info,2012-11-13T10:01:29.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3093.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.573,ns_1@127.0.0.1:ns_cookie_manager<0.2935.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [error_logger:info,2012-11-13T10:01:29.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3094.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.574,ns_1@127.0.0.1:<0.3099.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: 
['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [error_logger:info,2012-11-13T10:01:29.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3097.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.575,ns_1@127.0.0.1:<0.3099.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:info,2012-11-13T10:01:29.575,ns_1@127.0.0.1:ns_node_disco_events<0.3097.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T10:01:29.575,ns_1@127.0.0.1:ns_config_rep<0.3104.0>:ns_config_rep:init:66]init pulling [ns_server:debug,2012-11-13T10:01:29.576,ns_1@127.0.0.1:ns_config_rep<0.3104.0>:ns_config_rep:init:68]init pushing [error_logger:info,2012-11-13T10:01:29.576,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3098.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.577,ns_1@127.0.0.1:ns_config_rep<0.3104.0>:ns_config_rep:init:72]init reannouncing [error_logger:info,2012-11-13T10:01:29.577,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3101.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.578,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}] [ns_server:debug,2012-11-13T10:01:29.578,ns_1@127.0.0.1:ns_config_events<0.3078.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2012-11-13T10:01:29.580,ns_1@127.0.0.1:ns_config_rep<0.3104.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([auto_failover_cfg,autocompaction,buckets, cluster_compat_version,dynamic_config_version, email_alerts]..) 
[error_logger:info,2012-11-13T10:01:29.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3102.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.582,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3103.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.583,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:01:29.585,ns_1@127.0.0.1:ns_config_events<0.3078.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [error_logger:info,2012-11-13T10:01:29.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3104.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.585,ns_1@127.0.0.1:ns_cookie_manager<0.2935.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T10:01:29.586,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[[{map,[{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]}, {4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]}, {8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]}, {12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]}, {16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]}, {20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]}, {24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]}, {28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]}, {32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]}, {36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]}, {40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]}, 
{44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]}, {48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]}, {52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]}, {56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]}, {60,[],['ns_1@127.0.0.1',undefined]}, {61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}]}, {fastForwardMap,[]}, {uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}]]}] [error_logger:info,2012-11-13T10:01:29.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3096.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:29.587,ns_1@127.0.0.1:<0.3110.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T10:01:29.588,ns_1@127.0.0.1:ns_log_events<0.3094.0>:ns_mail_log:init:44]ns_mail_log started up [ns_server:debug,2012-11-13T10:01:29.589,ns_1@127.0.0.1:ns_cookie_manager<0.2935.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync [ns_server:debug,2012-11-13T10:01:29.588,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:info,2012-11-13T10:01:29.590,ns_1@127.0.0.1:remote_clusters_info<0.3123.0>:remote_clusters_info:read_or_create_table:384]Reading remote_clusters_info content from /Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/remote_clusters_cache [ns_server:debug,2012-11-13T10:01:29.591,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: cluster_compat_version -> [2,0] [ns_server:debug,2012-11-13T10:01:29.591,ns_1@127.0.0.1:<0.3110.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [error_logger:info,2012-11-13T10:01:29.592,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3107.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.593,ns_1@127.0.0.1:<0.3118.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T10:01:29.593,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: dynamic_config_version -> [2,0] 
[ns_server:debug,2012-11-13T10:01:29.593,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@127.0.0.1'}, {latest,minute}]}}} [error_logger:info,2012-11-13T10:01:29.594,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3111.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.595,ns_1@127.0.0.1:<0.3118.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou [ns_server:debug,2012-11-13T10:01:29.595,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2012-11-13T10:01:29.595,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T10:01:29.596,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3112.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.596,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:error,2012-11-13T10:01:29.597,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2012-11-13T10:01:29.597,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2012-11-13T10:01:29.598,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:mb_master:check_master_takeover_needed:144]Sending master node question to the following nodes: [] [ns_server:debug,2012-11-13T10:01:29.598,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: max_bucket_count -> 10 [ns_server:debug,2012-11-13T10:01:29.598,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391 
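[editor's note] The {'EXIT',{noproc,...}} reports above are benign startup races: ns_heart begins polling 'stats_reader-@system', 'stats_reader-default', xdc_replication_sup and samples_loader_tasks before those processes have registered (they all start further down in this log). A minimal sketch of a call wrapper that tolerates that window, assuming ordinary gen_server semantics -- illustrative only, not ns_server source:

    %% Retry a gen_server call while the target is not yet registered.
    %% exit:{noproc,_} is exactly the failure the heartbeat code above
    %% logs and ignores during early startup.
    call_when_up(Name, Request, Retries) when Retries > 0 ->
        try
            gen_server:call(Name, Request, 2000)
        catch
            exit:{noproc, _} ->
                timer:sleep(500),
                call_when_up(Name, Request, Retries - 1)
        end;
    call_when_up(Name, Request, 0) ->
        gen_server:call(Name, Request, 2000).

Once the supervision tree below finishes starting, the same calls succeed and these warnings stop appearing.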
[error_logger:info,2012-11-13T10:01:29.598,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3113.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.598,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:mb_master:check_master_takeover_needed:146]Got replies: [] [ns_server:debug,2012-11-13T10:01:29.599,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T10:01:29.599,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:mb_master:check_master_takeover_needed:152]Was unable to discover master, not going to force mastership takeover [ns_server:debug,2012-11-13T10:01:29.600,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: otp -> [{cookie,bptrojzpwfmfrqou}] [ns_server:debug,2012-11-13T10:01:29.601,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: remote_clusters -> [] [ns_server:debug,2012-11-13T10:01:29.601,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: replication -> [{enabled,true}] [error_logger:info,2012-11-13T10:01:29.601,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.3115.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.602,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: rest -> [{port,8091}] [user:info,2012-11-13T10:01:29.602,ns_1@127.0.0.1:mb_master<0.3130.0>:mb_master:init:89]I'm the only node, so I'm the master. 
[ns_server:info,2012-11-13T10:01:29.603,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [ns_server:debug,2012-11-13T10:01:29.604,ns_1@127.0.0.1:mb_master_sup<0.3132.0>:misc:start_singleton:855]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.3133.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:29.605,ns_1@127.0.0.1:<0.3137.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}} [ns_server:debug,2012-11-13T10:01:29.605,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [error_logger:info,2012-11-13T10:01:29.605,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3114.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:29.607,ns_1@127.0.0.1:<0.3137.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "default" on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:29.607,ns_1@127.0.0.1:mb_master_sup<0.3132.0>:misc:start_singleton:855]start_singleton(gen_server, ns_tick, [], []): started as <0.3138.0> on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:29.607,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: settings -> [{stats,[{send_stats,false}]}] [ns_server:debug,2012-11-13T10:01:29.608,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: uuid -> <<"b34a9c2e03786d913446a4e84919e1d5">> [error_logger:info,2012-11-13T10:01:29.608,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3116.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.610,ns_1@127.0.0.1:<0.3139.0>:auto_failover:init:120]init auto_failover. 
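[editor's note] misc:start_singleton above guards processes such as ns_orchestrator, ns_tick and auto_failover so that only one instance runs cluster-wide (here trivially so, since 'ns_1@127.0.0.1' is the only node and the master). The usual shape of such a guard, sketched with global name registration -- an assumption for illustration, not the actual misc.erl code:

    %% One cluster-wide instance: the first node to register the global
    %% name wins; later starters simply adopt the existing pid.
    start_singleton(Name, Mod, Args) ->
        case gen_server:start_link({global, Name}, Mod, Args, []) of
            {ok, Pid} ->
                {ok, Pid};
            {error, {already_started, Pid}} ->
                {ok, Pid}
        end.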
[ns_server:debug,2012-11-13T10:01:29.611,ns_1@127.0.0.1:mb_master_sup<0.3132.0>:misc:start_singleton:855]start_singleton(gen_server, auto_failover, [], []): started as <0.3139.0> on 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T10:01:29.611,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3117.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.616,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: vbucket_map_history -> [{[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}] [error_logger:info,2012-11-13T10:01:29.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3120.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.619,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_capi_checkpoint_timeout -> 10 [ns_server:debug,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_checkpoint_interval -> 1800 [ns_server:debug,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_connection_timeout -> 60 
[user:info,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [ns_server:debug,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_doc_batch_size_kb -> 512 [ns_server:debug,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_failure_restart_interval -> 30 [error_logger:info,2012-11-13T10:01:29.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3123.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.621,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_num_http_connections -> 20 [ns_server:debug,2012-11-13T10:01:29.622,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_num_retries_per_request -> 2 [ns_server:debug,2012-11-13T10:01:29.622,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_num_worker_process -> 4 [ns_server:debug,2012-11-13T10:01:29.622,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: xdcr_worker_batch_size -> 100 [ns_server:debug,2012-11-13T10:01:29.623,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2012-11-13T10:01:29.623,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_replica_indexers} -> 2 [error_logger:info,2012-11-13T10:01:29.623,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3133.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.623,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:info,2012-11-13T10:01:29.623,ns_1@127.0.0.1:<0.3170.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [ns_server:debug,2012-11-13T10:01:29.623,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2012-11-13T10:01:29.625,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,0} [error_logger:info,2012-11-13T10:01:29.625,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3138.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.627,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}] 
[ns_server:debug,2012-11-13T10:01:29.628,ns_1@127.0.0.1:<0.3173.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [error_logger:info,2012-11-13T10:01:29.628,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3139.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.629,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',membership} -> active [error_logger:info,2012-11-13T10:01:29.631,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3130.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:29.631,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}] [ns_server:debug,2012-11-13T10:01:29.632,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2012-11-13T10:01:29.633,ns_1@127.0.0.1:<0.3175.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [error_logger:info,2012-11-13T10:01:29.634,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3140.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.636,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}] [error_logger:info,2012-11-13T10:01:29.638,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3141.0>}, {name,master_activity_events_ingress}, {mfargs, 
{gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.640,ns_1@127.0.0.1:<0.3179.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [ns_server:debug,2012-11-13T10:01:29.638,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [error_logger:info,2012-11-13T10:01:29.643,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3142.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.645,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [error_logger:info,2012-11-13T10:01:29.646,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3143.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, 
{shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.649,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3144.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3147.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.653,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3164.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.654,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3165.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.655,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3166.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.657,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3146.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.658,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3168.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.659,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3169.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.661,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3170.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
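[editor's note] Each PROGRESS REPORT in this log is OTP's standard notification that a supervisor started a child; the name/mfargs/restart_type/shutdown/child_type fields map one-to-one onto the (pre-map, R14-era) child specification tuple. For example, the menelaus_sup children reported above correspond to specs of roughly this shape -- reconstructed for illustration, not copied from the source, and the restart intensity values are assumed:

    %% Child spec tuple: {Id, {M,F,A}, Restart, Shutdown, Type, Modules}
    init([]) ->
        {ok, {{one_for_one, 10, 10},   % assumed intensity/period
              [{menelaus_web,
                {menelaus_web, start_link, []},
                permanent, 5000, worker, [menelaus_web]},
               {hot_keys_keeper,
                {hot_keys_keeper, start_link, []},
                permanent, 5000, worker, [hot_keys_keeper]}]}}.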
[error_logger:info,2012-11-13T10:01:29.662,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3167.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.664,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3172.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.663,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2012-11-13T10:01:29.666,ns_1@127.0.0.1:ns_bucket_worker<0.3181.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent,infinity,supervisor, [single_bucket_sup]} [ns_server:debug,2012-11-13T10:01:29.667,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@127.0.0.1'}, {latest,minute}]}}} [ns_server:error,2012-11-13T10:01:29.667,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2012-11-13T10:01:29.668,ns_1@127.0.0.1:capi_set_view_manager-default<0.3193.0>:capi_set_view_manager:init:218]Usable vbuckets: [48,32,16,0,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2, 18,53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55, 39,23,58,42,26,10,61,45,29,13] [error_logger:info,2012-11-13T10:01:29.671,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3173.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", 
"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.682,ns_1@127.0.0.1:capi_set_view_manager-default<0.3193.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:warn,2012-11-13T10:01:29.685,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. [ns_server:debug,2012-11-13T10:01:29.691,ns_1@127.0.0.1:<0.3209.0>:supervisor_cushion:init:43]starting compaction_daemon with delay of 3000 [ns_server:debug,2012-11-13T10:01:29.692,ns_1@127.0.0.1:compaction_daemon<0.3210.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:01:29.693,ns_1@127.0.0.1:<0.3213.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [error_logger:info,2012-11-13T10:01:29.690,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3175.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.697,ns_1@127.0.0.1:<0.3213.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [error_logger:info,2012-11-13T10:01:29.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3171.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3178.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3179.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.709,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3181.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.710,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.3212.0>:xdc_rdoc_replication_srv:handle_info:132]doing replicate_newnodes_docs [ns_server:info,2012-11-13T10:01:29.710,ns_1@127.0.0.1:set_view_update_daemon<0.3218.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [ns_server:debug,2012-11-13T10:01:29.711,ns_1@127.0.0.1:<0.3216.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 535470 [error_logger:info,2012-11-13T10:01:29.712,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3182.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
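[editor's note] Comparing the {node,'ns_1@127.0.0.1',port_servers} template earlier in this log with the fully expanded moxi/memcached argument lists above shows how the ~B/~s placeholders are resolved from the per-node config (port, admin_user, the isasl path, and so on). The substitution behaves like plain io_lib:format/2, e.g.:

    1> lists:flatten(io_lib:format("~B", [11210])).
    "11210"
    2> lists:flatten(io_lib:format(
           "admin=~s;default_bucket_name=default;auto_create=false",
           ["_admin"])).
    "admin=_admin;default_bucket_name=default;auto_create=false"

The second result is exactly the "-e" argument memcached was launched with above.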
[ns_server:debug,2012-11-13T10:01:29.712,ns_1@127.0.0.1:<0.3216.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:01:29.713,ns_1@127.0.0.1:compaction_daemon<0.3210.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [error_logger:info,2012-11-13T10:01:29.713,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3183.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:29.714,ns_1@127.0.0.1:compaction_daemon<0.3210.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [error_logger:info,2012-11-13T10:01:29.715,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3189.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.716,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3187.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.717,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3191.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.718,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3193.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3190.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.721,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3203.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.722,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3205.0>}, 
{name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.723,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3206.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.724,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3207.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.726,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3209.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.727,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3212.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.729,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3218.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.730,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3220.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.732,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3091.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:30.609,ns_1@127.0.0.1:<0.3137.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}} [ns_server:debug,2012-11-13T10:01:30.609,ns_1@127.0.0.1:<0.3137.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "default" on 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:30.692,ns_1@127.0.0.1:<0.3170.0>:mc_tcp_listener:accept_loop:31]Got new connection 
[ns_server:debug,2012-11-13T10:01:30.692,ns_1@127.0.0.1:<0.3170.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.3229.0> [ns_server:info,2012-11-13T10:01:30.701,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [error_logger:info,2012-11-13T10:01:30.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3201.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:30.704,ns_1@127.0.0.1:janitor_agent-default<0.3237.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. [ns_server:info,2012-11-13T10:01:30.705,ns_1@127.0.0.1:janitor_agent-default<0.3237.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config [error_logger:info,2012-11-13T10:01:30.705,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3234.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.709,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3235.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:30.710,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3236.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:30.712,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3237.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.714,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3238.0>}, {name,{couch_stats_reader,"default"}}, 
{mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.715,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3239.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.717,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3241.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.718,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3243.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3244.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [views:debug,2012-11-13T10:01:30.720,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:30.721,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",63,dead,1} [views:debug,2012-11-13T10:01:30.722,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:30.723,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",62,dead,1} [views:debug,2012-11-13T10:01:30.725,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:30.725,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",61,dead,1} [views:debug,2012-11-13T10:01:30.727,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:30.727,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",60,dead,1} [views:debug,2012-11-13T10:01:30.729,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. 
The same set_vbucket event/signal pair repeats for each of vbuckets 62 down to 11 (2012-11-13T10:01:30.722-30.890), identical apart from the vbucket number and timestamp.

[ns_server:info,2012-11-13T10:01:30.890,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]
memcached<0.3177.0>: Tue Nov 13 10:01:30.690386 PST 3: Trying to connect to mccouch: "localhost:11213"
memcached<0.3177.0>: Tue Nov 13 10:01:30.691887 PST 3: Connected to mccouch: "localhost:11213"
memcached<0.3177.0>: Tue Nov 13 10:01:30.701758 PST 3: Extension support isn't implemented in this version of bucket_engine
memcached<0.3177.0>: Tue Nov 13 10:01:30.708314 PST 3: Failed to load mutation log, falling back to key dump
memcached<0.3177.0>: Tue Nov 13 10:01:30.714911 PST 3: metadata loaded in 21 ms
memcached<0.3177.0>: Tue Nov 13 10:01:30.718107 PST 3: warmup completed in 24 ms

The dead set_vbucket event/signal pairs then continue for vbuckets 10 down to 0 (2012-11-13T10:01:30.894-30.935).

[ns_server:debug,2012-11-13T10:01:31.182,ns_1@127.0.0.1:capi_set_view_manager-default<0.3193.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs

[user:info,2012-11-13T10:01:31.184,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.

[ns_server:info,2012-11-13T10:01:31.611,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 0 in "default" on 'ns_1@127.0.0.1' from dead to active.
Identical ns_janitor:do_sanify_chain lines follow for vbuckets 1 through 63 (2012-11-13T10:01:31.612-31.630), each setting the vbucket in "default" on 'ns_1@127.0.0.1' from dead to active.
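The run above is the janitor repairing the vbucket map after warmup: every vbucket came up dead, and since the config says this single node owns all 64, each one is promoted to active. As a rough illustration only (a toy model, with made-up names, not the actual ns_janitor code), the per-vbucket decision looks something like this:

%% janitor_sketch.erl -- illustrative only, NOT Couchbase source.
-module(janitor_sketch).
-export([sanify/3]).

%% Node heads the wanted replica chain but the engine reports the
%% vbucket dead: promote it, mirroring the log lines above.
sanify(Node, VBucket, {[Node | _Replicas], dead}) ->
    io:format("Setting vbucket ~b in \"default\" on ~p from dead to active.~n",
              [VBucket, Node]),
    {set_vbucket, VBucket, active};
%% Already in the wanted state: nothing to do.
sanify(Node, _VBucket, {[Node | _Replicas], active}) ->
    ok;
%% Some other node heads this vbucket's chain: not handled here.
sanify(_Node, _VBucket, _ChainAndState) ->
    ignored.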
[ns_server:info,2012-11-13T10:01:31.632,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active

Matching "Changed vbucket N state to active" lines follow for vbuckets 62 down to 0 (2012-11-13T10:01:31.632-31.666), interleaved with the corresponding set_vbucket event/signal pairs, now active, of which the first is:

[views:debug,2012-11-13T10:01:31.634,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.635,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",63,active,1}

The active event/signal pairs repeat for vbuckets 62 down to 56 (2012-11-13T10:01:31.637-31.668) around the following two records:

[ns_server:info,2012-11-13T10:01:31.666,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default"
[ns_server:info,2012-11-13T10:01:31.667,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 0 seconds
Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.793,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",52,active,1} [views:debug,2012-11-13T10:01:31.796,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.796,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",51,active,1} [views:debug,2012-11-13T10:01:31.798,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.799,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",50,active,1} [views:debug,2012-11-13T10:01:31.801,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.801,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",49,active,1} [views:debug,2012-11-13T10:01:31.804,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.804,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",48,active,1} [views:debug,2012-11-13T10:01:31.807,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.808,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",47,active,1} [views:debug,2012-11-13T10:01:31.810,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.811,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",46,active,1} [views:debug,2012-11-13T10:01:31.814,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.814,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",45,active,1} [views:debug,2012-11-13T10:01:31.817,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.817,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",44,active,1} [views:debug,2012-11-13T10:01:31.820,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. 
Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.820,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",43,active,1} [views:debug,2012-11-13T10:01:31.824,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.825,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",42,active,1} [views:debug,2012-11-13T10:01:31.828,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.828,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",41,active,1} [views:debug,2012-11-13T10:01:31.831,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.832,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",40,active,1} [views:debug,2012-11-13T10:01:31.835,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.835,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",39,active,1} [views:debug,2012-11-13T10:01:31.838,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.839,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",38,active,1} [views:debug,2012-11-13T10:01:31.841,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.841,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",37,active,1} [views:debug,2012-11-13T10:01:31.844,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.844,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",36,active,1} [views:debug,2012-11-13T10:01:31.846,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:31.847,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",35,active,1} [views:debug,2012-11-13T10:01:31.850,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. 
Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.850,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",34,active,1}
[views:debug,2012-11-13T10:01:31.852,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.853,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",33,active,1}
[views:debug,2012-11-13T10:01:31.855,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.855,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",32,active,1}
[views:debug,2012-11-13T10:01:31.865,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.865,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",31,active,1}
[views:debug,2012-11-13T10:01:31.872,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.872,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",30,active,1}
[views:debug,2012-11-13T10:01:31.874,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.875,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",29,active,1}
[views:debug,2012-11-13T10:01:31.877,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.878,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",28,active,1}
[views:debug,2012-11-13T10:01:31.880,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.880,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",27,active,1}
[views:debug,2012-11-13T10:01:31.884,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.885,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",26,active,1}
[views:debug,2012-11-13T10:01:31.897,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.898,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",25,active,1}
[views:debug,2012-11-13T10:01:31.905,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.905,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",24,active,1}
[views:debug,2012-11-13T10:01:31.907,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.908,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",23,active,1}
[views:debug,2012-11-13T10:01:31.911,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.912,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",22,active,1}
[views:debug,2012-11-13T10:01:31.915,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.916,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",21,active,1}
[views:debug,2012-11-13T10:01:31.919,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.919,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",20,active,1}
[views:debug,2012-11-13T10:01:31.921,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.922,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",19,active,1}
[views:debug,2012-11-13T10:01:31.926,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.926,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",18,active,1}
[views:debug,2012-11-13T10:01:31.929,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.930,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",17,active,1}
[views:debug,2012-11-13T10:01:31.933,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.934,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",16,active,1}
[views:debug,2012-11-13T10:01:31.937,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.938,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",15,active,1}
[views:debug,2012-11-13T10:01:31.941,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.941,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",14,active,1}
[views:debug,2012-11-13T10:01:31.944,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.945,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",13,active,1}
[views:debug,2012-11-13T10:01:31.949,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.950,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",12,active,1}
[views:debug,2012-11-13T10:01:31.953,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.953,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",11,active,1}
[views:debug,2012-11-13T10:01:31.955,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.956,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",10,active,1}
[views:debug,2012-11-13T10:01:31.958,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.959,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",9,active,1}
[views:debug,2012-11-13T10:01:31.962,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.962,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",8,active,1}
[views:debug,2012-11-13T10:01:31.965,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.965,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",7,active,1}
[views:debug,2012-11-13T10:01:31.967,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.968,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",6,active,1}
[views:debug,2012-11-13T10:01:31.971,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.971,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",5,active,1}
[views:debug,2012-11-13T10:01:31.973,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.974,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",4,active,1}
[views:debug,2012-11-13T10:01:31.976,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.976,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",3,active,1}
[views:debug,2012-11-13T10:01:31.979,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.979,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",2,active,1}
[views:debug,2012-11-13T10:01:31.981,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.982,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",1,active,1}
[views:debug,2012-11-13T10:01:31.985,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:31.986,ns_1@127.0.0.1:<0.3229.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",0,active,1}
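Each pair of lines above is one round trip through the mc_couch event fan-out: mc_connection signals a {set_vbucket, Bucket, VBucket, State, Checkpoint} notification into the mc_couch_events manager, and the capi_set_view_manager handler logs the transition. A minimal sketch of a consumer with that shape, assuming a plain OTP gen_event handler (the module below is illustrative, not the actual ns_server source):

    -module(set_vbucket_handler_sketch).
    -behaviour(gen_event).
    -export([init/1, handle_event/2, handle_call/2, handle_info/2,
             terminate/2, code_change/3]).

    %% State is just the bucket name this handler cares about.
    init(Bucket) ->
        {ok, Bucket}.

    %% One callback invocation per "Got set_vbucket event for default/N" line.
    handle_event({set_vbucket, Bucket, VBucket, VBState, Checkpoint}, Bucket) ->
        error_logger:info_msg("Got set_vbucket event for ~s/~B. "
                              "Updated state: ~p (~B)~n",
                              [Bucket, VBucket, VBState, Checkpoint]),
        {ok, Bucket};
    handle_event(_Event, Bucket) ->
        {ok, Bucket}.

    handle_call(_Request, Bucket) -> {ok, ok, Bucket}.
    handle_info(_Info, Bucket) -> {ok, Bucket}.
    terminate(_Reason, _Bucket) -> ok.
    code_change(_OldVsn, Bucket, _Extra) -> {ok, Bucket}.

Registering such a handler with gen_event:add_handler(mc_couch_events, set_vbucket_handler_sketch, "default") would yield exactly this cadence: one pair of lines per vbucket as the bucket's 64 vbuckets go active during warmup.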
[ns_server:info,2012-11-13T10:01:34.614,ns_1@127.0.0.1:ns_doctor<0.3120.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"]
[ns_server:debug,2012-11-13T10:01:38.578,ns_1@127.0.0.1:<0.3219.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3218.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.579,ns_1@127.0.0.1:<0.3217.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.3212.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.579,ns_1@127.0.0.1:<0.3211.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3210.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.579,ns_1@127.0.0.1:<0.3204.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.3203.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.579,ns_1@127.0.0.1:<0.3208.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3207.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.580,ns_1@127.0.0.1:<0.3202.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.3190.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.580,ns_1@127.0.0.1:<0.3191.0>:single_bucket_sup:top_loop:28]Delegating exit {'EXIT',<0.3187.0>,shutdown} to child supervisor: <0.3192.0>
[ns_server:debug,2012-11-13T10:01:38.582,ns_1@127.0.0.1:<0.3242.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.3241.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.582,ns_1@127.0.0.1:<0.3240.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.3239.0>} exited with reason shutdown
[user:info,2012-11-13T10:01:38.582,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown
[ns_server:info,2012-11-13T10:01:38.582,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:terminate:672]This bucket shutdown is not due to bucket deletion. Doing nothing
[ns_server:debug,2012-11-13T10:01:38.583,ns_1@127.0.0.1:<0.3196.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.3193.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.583,ns_1@127.0.0.1:<0.3191.0>:single_bucket_sup:top_loop:24]per-bucket supervisor for "default" died with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.583,ns_1@127.0.0.1:<0.3194.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3193.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.583,ns_1@127.0.0.1:<0.3197.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {mc_couch_events,<0.3193.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.583,ns_1@127.0.0.1:<0.3189.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3187.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.584,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:terminate:143]Sending 'shutdown' to port
[ns_server:debug,2012-11-13T10:01:38.584,ns_1@127.0.0.1:<0.3180.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3178.0>} exited with reason killed
[error_logger:error,2012-11-13T10:01:38.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
Supervisor: {local,ns_bucket_sup}
Context: shutdown_error
Reason: normal
Offender: [{pid,<0.3189.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:38.784,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}}
[ns_server:debug,2012-11-13T10:01:38.785,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@127.0.0.1'}, {latest,minute}]}}}
[ns_server:info,2012-11-13T10:01:38.785,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]memcached<0.3177.0>: EOL on stdin. Initiating shutdown
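The burst of do_subscribe_link lines is a single mechanism unwinding: each subscription helper process is linked to the subscriber that created it, and when that parent exits the helper logs the reason and detaches its handler from the event manager. A rough sketch of that shape using only standard OTP primitives (simplified; the real ns_pubsub module differs in detail):

    %% Spawn-linked helper: subscribes Handler to EventManager on behalf
    %% of Parent, then waits for Parent to die and cleans up after it.
    do_subscribe_link(EventManager, Handler, Parent) ->
        process_flag(trap_exit, true),
        link(Parent),
        ok = gen_event:add_handler(EventManager, Handler, []),
        receive
            {'EXIT', Parent, Reason} ->
                %% This is the line that floods the log during shutdown.
                error_logger:info_msg(
                  "Parent process of subscription ~p exited with reason ~p~n",
                  [{EventManager, Parent}, Reason]),
                gen_event:delete_handler(EventManager, Handler, unsubscribed)
        end.

Tearing down the parent supervision tree therefore produces one such line per live subscription, which is why they arrive in bursts here rather than one at a time.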
[ns_server:debug,2012-11-13T10:01:38.785,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:error,2012-11-13T10:01:38.786,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:error,2012-11-13T10:01:38.809,ns_1@127.0.0.1:ns_doctor<0.3120.0>:ns_doctor:update_status:205]The following buckets became not ready on node 'ns_1@127.0.0.1': ["default"], those of them are active []
[ns_server:info,2012-11-13T10:01:38.858,ns_1@127.0.0.1:<0.3229.0>:mc_connection:run_loop:202]mccouch connection was normally closed
[ns_server:info,2012-11-13T10:01:38.858,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0
[ns_server:info,2012-11-13T10:01:38.859,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]memcached<0.3177.0>: Tue Nov 13 10:01:38.822590 PST 3: Shutting down tap connections!
memcached<0.3177.0>: Tue Nov 13 10:01:38.823826 PST 3: Had to wait 1154 usec for shutdown
[ns_server:debug,2012-11-13T10:01:38.859,ns_1@127.0.0.1:<0.3174.0>:ns_port_server:terminate:143]Sending 'shutdown' to port
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:<0.3174.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:<0.3174.0>:ns_port_server:log:171]moxi<0.3174.0>: EOL on stdin. Exiting
[ns_server:debug,2012-11-13T10:01:38.860,ns_1@127.0.0.1:<0.3145.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {master_activity_events,<0.3144.0>} exited with reason killed
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:mb_master<0.3130.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3121.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3120.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3131.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3130.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3119.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {buckets_events,<0.3117.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3108.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3107.0>} exited with reason killed
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3105.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events_local,<0.3104.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.861,ns_1@127.0.0.1:<0.3095.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3093.0>} exited with reason killed
[error_logger:error,2012-11-13T10:01:38.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
Supervisor: {local,ns_server_cluster_sup}
Context: shutdown_error
Reason: killed
Offender: [{pid,<0.3090.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:38.965,ns_1@127.0.0.1:<0.3084.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3083.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.964,ns_1@127.0.0.1:<0.3089.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3088.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.965,ns_1@127.0.0.1:<0.3087.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3086.0>} exited with reason shutdown
[ns_server:debug,2012-11-13T10:01:38.963,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:wait_saver:539]Waited for saver done.
State= {config, {full, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", undefined,ns_config_default}, [[], [{xdcr_checkpoint_interval,1800}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_worker_batch_size,100}, {xdcr_connection_timeout,60}, {xdcr_num_worker_process,4}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {directory, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,2384}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, 
use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,2391}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false}
[ns_server:info,2012-11-13T10:01:39.078,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting.
[error_logger:info,2012-11-13T10:01:39.078,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: mnesia
exited: stopped
type: temporary
[error_logger:error,2012-11-13T10:01:39.088,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_file:spawn_writer/2
pid: <0.2489.0>
registered_name: []
exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2489.0>}, infinity]}}
in function gen_server:call/3
in call from couch_file:writer_loop/4
ancestors: [<0.2487.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 377
stack_size: 24
reductions: 691
neighbours:
[error_logger:error,2012-11-13T10:01:39.089,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2754.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2754.0>,<0.2755.0>,nil,<<"1352829685736678">>, <0.2751.0>,<0.2756.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/47">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.094,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2754.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 439
neighbours:
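The couch_file crash above is the standard noproc shape: couch_file_write_guard had already terminated, so the writer's gen_server:call to that registered name exited the calling process with {noproc, ...}. It is the same reason shown in the earlier ns_heart "Ignoring failure" entries for the stats readers. The behaviour can be reproduced in any Erlang shell where nothing is registered under the name; the pid embedded in the result will of course differ:

    1> catch gen_server:call(couch_file_write_guard, {remove, self()}, infinity).
    {'EXIT',{noproc,{gen_server,call,
                     [couch_file_write_guard,{remove,<0.85.0>},infinity]}}}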
[error_logger:error,2012-11-13T10:01:39.095,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2682.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2682.0>,<0.2683.0>,nil,<<"1352829685721321">>, <0.2679.0>,<0.2684.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/36">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2682.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 447
neighbours:
[error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2718.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2718.0>,<0.2719.0>,nil,<<"1352829685729409">>, <0.2715.0>,<0.2720.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/41">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.106,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2718.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 455
neighbours:
[error_logger:error,2012-11-13T10:01:39.107,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2832.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2832.0>,<0.2833.0>,nil,<<"1352829685753066">>, <0.2829.0>,<0.2834.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/59">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.112,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2832.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 450
neighbours:
[error_logger:error,2012-11-13T10:01:39.113,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2826.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2826.0>,<0.2827.0>,nil,<<"1352829685751685">>, <0.2823.0>,<0.2828.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/58">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.117,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2826.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 458
neighbours:
[error_logger:error,2012-11-13T10:01:39.118,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2856.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2856.0>,<0.2857.0>,nil,<<"1352829685758407">>, <0.2853.0>,<0.2858.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2856.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 466
neighbours:
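These repeated pairs are one report per per-vbucket couch_db gen_server: each traps exits, so the kill of its linked couch_server (<0.2486.0>) arrives as the "Last message in", gen_server logs the state and reason, and SASL emits a matching CRASH REPORT. A minimal gen_server with that behaviour, assuming an illustrative module (not couch_db itself):

    -module(trap_exit_sketch).
    -behaviour(gen_server).
    -export([start_link/0, init/1, handle_call/3, handle_cast/2,
             handle_info/2, terminate/2, code_change/3]).

    start_link() ->
        gen_server:start_link(?MODULE, [], []).

    init([]) ->
        process_flag(trap_exit, true),   % exit signals become messages
        {ok, []}.

    handle_call(_Request, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.

    %% An 'EXIT' from a linked process stops the server with the same
    %% reason; for an abnormal reason such as killed, gen_server then
    %% prints "** Generic server ... terminating" and SASL adds the
    %% CRASH REPORT, the exact pairing seen above.
    handle_info({'EXIT', _From, Reason}, State) ->
        {stop, Reason, State};
    handle_info(_Info, State) ->
        {noreply, State}.

    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

With 64 vbucket databases open for the "default" bucket, killing their common ancestor produces one termination message plus one crash report per process, which accounts for the length of this stretch of the log.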
[error_logger:error,2012-11-13T10:01:39.125,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2808.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2808.0>,<0.2809.0>,nil,<<"1352829685747757">>, <0.2805.0>,<0.2810.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/55">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.130,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2808.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 474
neighbours:
[error_logger:error,2012-11-13T10:01:39.131,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2862.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2862.0>,<0.2863.0>,nil,<<"1352829685759790">>, <0.2859.0>,<0.2864.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2862.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 482
neighbours:
[error_logger:error,2012-11-13T10:01:39.136,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2496.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2496.0>,<0.2497.0>,nil,<<"1352829685577250">>, <0.2493.0>,<0.2498.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2493.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2814.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2814.0>,<0.2815.0>,nil,<<"1352829685749123">>, <0.2811.0>,<0.2816.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/56">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.143,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2496.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 987
stack_size: 24
reductions: 309
neighbours:
[error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2814.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 494
neighbours:
[error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2850.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2850.0>,<0.2851.0>,nil,<<"1352829685756921">>, <0.2847.0>,<0.2852.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/61">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.148,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2796.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2796.0>,<0.2797.0>,nil,<<"1352829685745381">>, <0.2793.0>,<0.2798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/53">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.152,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2796.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 422
neighbours:
[error_logger:error,2012-11-13T10:01:39.154,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2850.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 506
neighbours:
[error_logger:info,2012-11-13T10:01:39.154,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: mapreduce
exited: stopped
type: temporary
[error_logger:info,2012-11-13T10:01:39.155,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_view_parser
exited: stopped
type: temporary
[error_logger:info,2012-11-13T10:01:39.155,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_index_merger
exited: stopped
type: temporary
[error_logger:error,2012-11-13T10:01:39.156,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2640.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2640.0>,<0.2641.0>,nil,<<"1352829685711040">>, <0.2637.0>,<0.2642.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:info,2012-11-13T10:01:39.235,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_set_view
exited: stopped
type: temporary
[error_logger:error,2012-11-13T10:01:39.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2640.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 426
neighbours:
[error_logger:error,2012-11-13T10:01:39.237,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2490.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2490.0>,<0.2491.0>,nil,<<"1352829685334923">>, <0.2487.0>,<0.2492.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2487.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.241,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2490.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 610
stack_size: 24
reductions: 436
neighbours:
[error_logger:error,2012-11-13T10:01:39.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2502.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2502.0>,<0.2503.0>,nil,<<"1352829685683035">>, <0.2499.0>,<0.2504.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.245,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2502.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 554
neighbours:
[error_logger:error,2012-11-13T10:01:39.246,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2712.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2712.0>,<0.2713.0>,nil,<<"1352829685727974">>, <0.2709.0>,<0.2714.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2712.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 575
neighbours:
[error_logger:error,2012-11-13T10:01:39.250,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2586.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2586.0>,<0.2587.0>,nil,<<"1352829685699966">>, <0.2583.0>,<0.2588.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.253,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2586.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 570
neighbours:
[error_logger:error,2012-11-13T10:01:39.254,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2784.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2784.0>,<0.2785.0>,nil,<<"1352829685742942">>, <0.2781.0>,<0.2786.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/51">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.257,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.2784.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 578
neighbours:
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 578 neighbours: [error_logger:error,2012-11-13T10:01:39.258,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2874.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2874.0>,<0.2875.0>,nil,<<"1352829685762498">>, <0.2871.0>,<0.2876.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/8">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2874.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 586 neighbours: [error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2616.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2616.0>,<0.2617.0>,nil,<<"1352829685706449">>, <0.2613.0>,<0.2618.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2616.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 594 neighbours: [error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2634.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2634.0>,<0.2635.0>,nil,<<"1352829685709980">>, <0.2631.0>,<0.2636.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>, {12334,<<>>,93}, #Fun, #Fun, 
#Fun,nil,1279,2558, true}, 0,<<"default/29">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:info,2012-11-13T10:01:39.269,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: 'ns_1@127.0.0.1' [error_logger:error,2012-11-13T10:01:39.270,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2634.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 615 neighbours: [error_logger:error,2012-11-13T10:01:39.271,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2670.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2670.0>,<0.2671.0>,nil,<<"1352829685718719">>, <0.2667.0>,<0.2672.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/34">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.274,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2670.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 627 neighbours: [error_logger:error,2012-11-13T10:01:39.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2610.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2610.0>,<0.2611.0>,nil,<<"1352829685705115">>, <0.2607.0>,<0.2612.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2607.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.278,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2610.0> registered_name: [] exception exit: killed in 
function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 622 neighbours: [error_logger:info,2012-11-13T10:01:39.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_set_view started_at: 'ns_1@127.0.0.1' [error_logger:error,2012-11-13T10:01:39.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2778.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2778.0>,<0.2779.0>,nil,<<"1352829685741611">>, <0.2775.0>,<0.2780.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/50">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2778.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 422 neighbours: [error_logger:info,2012-11-13T10:01:39.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T10:01:39.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mapreduce started_at: 'ns_1@127.0.0.1' [error_logger:error,2012-11-13T10:01:39.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2520.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2520.0>,<0.2521.0>,nil,<<"1352829685687483">>, <0.2517.0>,<0.2522.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/11">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2520.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 438 neighbours: [error_logger:error,2012-11-13T10:01:39.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2568.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2568.0>,<0.2569.0>,nil,<<"1352829685695867">>, <0.2565.0>,<0.2570.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2568.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 446 neighbours: [error_logger:error,2012-11-13T10:01:39.293,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2688.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2688.0>,<0.2689.0>,nil,<<"1352829685722740">>, <0.2685.0>,<0.2690.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/37">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.296,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2688.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 467 neighbours: [error_logger:error,2012-11-13T10:01:39.297,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2550.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2550.0>,<0.2551.0>,nil,<<"1352829685692516">>, <0.2547.0>,<0.2552.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>, {12334,<<>>,93}, #Fun, #Fun, 
#Fun,nil,1279,2558, true}, 0,<<"default/16">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.300,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2550.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 462 neighbours: [error_logger:error,2012-11-13T10:01:39.301,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2742.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2742.0>,<0.2743.0>,nil,<<"1352829685734113">>, <0.2739.0>,<0.2744.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.304,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2742.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 483 neighbours: [error_logger:error,2012-11-13T10:01:39.305,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2700.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2700.0>,<0.2701.0>,nil,<<"1352829685725516">>, <0.2697.0>,<0.2702.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/39">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.308,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2700.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 
reductions: 491 neighbours: [error_logger:error,2012-11-13T10:01:39.309,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2838.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2838.0>,<0.2839.0>,nil,<<"1352829685754116">>, <0.2835.0>,<0.2840.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/6">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2838.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 694 neighbours: [error_logger:error,2012-11-13T10:01:39.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2868.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2868.0>,<0.2869.0>,nil,<<"1352829685761171">>, <0.2865.0>,<0.2870.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.317,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.2885.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2885.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.2883.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 729 neighbours: [error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2868.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 702 neighbours: 
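The writer crash just above (<0.2885.0>) is the one record in this stream that does not exit with reason killed: its gen_server:call to couch_file_write_guard raced with the shutdown, and calling a gen_server that is already gone exits the caller with {noproc, {gen_server,call,...}}. A minimal sketch of that failure mode, assuming only a registered name that no longer exists (the module name here is hypothetical, not part of the Couchbase source):

    -module(noproc_sketch).
    -export([run/0]).

    %% Calling a registered gen_server after it has terminated exits the
    %% caller with {noproc, {gen_server,call,[Name,Request,Timeout]}} --
    %% the same shape as the writer_loop crash logged above.
    run() ->
        try
            gen_server:call(couch_file_write_guard, {remove, self()}, infinity)
        catch
            exit:{noproc, _} = Reason ->
                io:format("caller exits with ~p~n", [Reason])
        end.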
[error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2532.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2532.0>,<0.2533.0>,nil,<<"1352829685689439">>, <0.2529.0>,<0.2534.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/13">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.323,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2532.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 426 neighbours: [error_logger:error,2012-11-13T10:01:39.324,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2544.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2544.0>,<0.2545.0>,nil,<<"1352829685691394">>, <0.2541.0>,<0.2546.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/15">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.328,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2544.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 434 neighbours: [error_logger:error,2012-11-13T10:01:39.329,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2766.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2766.0>,<0.2767.0>,nil,<<"1352829685738876">>, <0.2763.0>,<0.2768.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/49">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:39.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2766.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 442 neighbours: [error_logger:error,2012-11-13T10:01:39.333,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2820.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2820.0>,<0.2821.0>,nil,<<"1352829685750202">>, <0.2817.0>,<0.2822.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/57">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.336,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2820.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 450 neighbours: [error_logger:error,2012-11-13T10:01:39.337,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2598.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2598.0>,<0.2599.0>,nil,<<"1352829685702622">>, <0.2595.0>,<0.2600.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/23">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.340,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2598.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 458 neighbours: [error_logger:error,2012-11-13T10:01:39.341,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2628.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server 
state == {db,<0.2628.0>,<0.2629.0>,nil,<<"1352829685708726">>, <0.2625.0>,<0.2630.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/28">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.345,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2628.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 479 neighbours: [error_logger:error,2012-11-13T10:01:39.346,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2652.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2652.0>,<0.2653.0>,nil,<<"1352829685713972">>, <0.2649.0>,<0.2654.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2649.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.458,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2652.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 474 neighbours: [error_logger:error,2012-11-13T10:01:39.459,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2646.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2646.0>,<0.2647.0>,nil,<<"1352829685712452">>, <0.2643.0>,<0.2648.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/30">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.462,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial 
call: couch_db:init/1 pid: <0.2646.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 482 neighbours: [error_logger:error,2012-11-13T10:01:39.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2622.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2622.0>,<0.2623.0>,nil,<<"1352829685707601">>, <0.2619.0>,<0.2624.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/27">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2622.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 490 neighbours: [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2802.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2802.0>,<0.2803.0>,nil,<<"1352829685746658">>, <0.2799.0>,<0.2804.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/54">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.471,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2802.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 498 neighbours: [error_logger:error,2012-11-13T10:01:39.472,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2514.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2514.0>,<0.2515.0>,nil,<<"1352829685686322">>, <0.2511.0>,<0.2516.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2511.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/10">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.475,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2514.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 506 neighbours: [error_logger:error,2012-11-13T10:01:39.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2772.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2772.0>,<0.2773.0>,nil,<<"1352829685740221">>, <0.2769.0>,<0.2774.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/5">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.479,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2772.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 514 neighbours: [error_logger:error,2012-11-13T10:01:39.480,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2880.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2880.0>,<0.2881.0>,nil,<<"1352829685763855">>, <0.2877.0>,<0.2882.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/9">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2880.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] 
messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 522 neighbours: [error_logger:error,2012-11-13T10:01:39.484,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2538.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2538.0>,<0.2539.0>,nil,<<"1352829685690418">>, <0.2535.0>,<0.2540.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2538.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 530 neighbours: [error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2508.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2508.0>,<0.2509.0>,nil,<<"1352829685684645">>, <0.2505.0>,<0.2510.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/1">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.492,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2508.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 538 neighbours: [error_logger:error,2012-11-13T10:01:39.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2844.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2844.0>,<0.2845.0>,nil,<<"1352829685755504">>, <0.2841.0>,<0.2846.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/60">>, "/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchdb/default/60.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.496,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2844.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 546 neighbours: [error_logger:error,2012-11-13T10:01:39.497,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2562.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2562.0>,<0.2563.0>,nil,<<"1352829685694660">>, <0.2559.0>,<0.2564.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/18">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2562.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 554 neighbours: [error_logger:error,2012-11-13T10:01:39.501,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2676.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2676.0>,<0.2677.0>,nil,<<"1352829685719901">>, <0.2673.0>,<0.2678.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2676.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 575 neighbours: 
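Every couch_db record in this stream follows one pattern: trap_exit is true, the last message in is {'EXIT',<0.2486.0>,killed}, and the server terminates with reason killed — a single kill of the process these db servers are linked to, propagated to each of them as an exit signal. A minimal sketch of that propagation, assuming nothing beyond plain links and trap_exit (module and names are hypothetical stand-ins, not the Couchbase processes themselves):

    -module(cascade_sketch).
    -export([run/0]).

    %% One process stands in for <0.2486.0>; a linked, exit-trapping
    %% process stands in for a couch_db server. Killing the first delivers
    %% {'EXIT', Pid, killed} to the second, which shuts down with reason
    %% killed -- the sequence logged for every vbucket above.
    run() ->
        Self = self(),
        Hub = spawn(fun() -> receive stop -> ok end end),
        Db = spawn(fun() ->
                       process_flag(trap_exit, true),
                       link(Hub),
                       Self ! linked,
                       receive
                           {'EXIT', Hub, Reason} -> exit(Reason)
                       end
                   end),
        receive linked -> ok end,
        Ref = monitor(process, Db),
        exit(Hub, kill),
        receive
            {'DOWN', Ref, process, Db, Why} ->
                io:format("db stand-in exited: ~p~n", [Why])  %% prints killed
        end.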
[error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2526.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2526.0>,<0.2527.0>,nil,<<"1352829685688455">>, <0.2523.0>,<0.2528.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.508,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2724.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2724.0>,<0.2725.0>,nil,<<"1352829685730532">>, <0.2721.0>,<0.2726.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.512,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2526.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 570 neighbours: [error_logger:error,2012-11-13T10:01:39.513,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2724.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 587 neighbours: [error_logger:error,2012-11-13T10:01:39.514,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2760.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2760.0>,<0.2761.0>,nil,<<"1352829685737859">>, <0.2757.0>,<0.2762.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2760.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 599 neighbours: [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2706.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2706.0>,<0.2707.0>,nil,<<"1352829685726622">>, <0.2703.0>,<0.2708.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/4">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.522,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2706.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 594 neighbours: [error_logger:error,2012-11-13T10:01:39.523,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2694.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2694.0>,<0.2695.0>,nil,<<"1352829685723978">>, <0.2691.0>,<0.2696.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/38">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 602 neighbours: [error_logger:error,2012-11-13T10:01:39.527,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2556.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server 
state == {db,<0.2556.0>,<0.2557.0>,nil,<<"1352829685693528">>, <0.2553.0>,<0.2558.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/17">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.530,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2556.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 610 neighbours: [error_logger:error,2012-11-13T10:01:39.531,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2748.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2748.0>,<0.2749.0>,nil,<<"1352829685735209">>, <0.2745.0>,<0.2750.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/46">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.535,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2748.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 631 neighbours: [error_logger:error,2012-11-13T10:01:39.536,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2574.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2574.0>,<0.2575.0>,nil,<<"1352829685697361">>, <0.2571.0>,<0.2576.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.539,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2736.0> terminating ** Last message in was 
{'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2736.0>,<0.2737.0>,nil,<<"1352829685732765">>, <0.2733.0>,<0.2738.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.542,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2574.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 626 neighbours: [error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2736.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 643 neighbours: [error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2790.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2790.0>,<0.2791.0>,nil,<<"1352829685744125">>, <0.2787.0>,<0.2792.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/52">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2790.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 642 neighbours: [error_logger:error,2012-11-13T10:01:39.549,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2886.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2886.0>,<0.2887.0>,nil,<<"1352829685765663">>, <0.2883.0>,<0.2888.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, 
{btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.551,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2580.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2580.0>,<0.2581.0>,nil,<<"1352829685698636">>, <0.2577.0>,<0.2582.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2886.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 538 neighbours: [error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2580.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 654 neighbours: [error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2592.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2592.0>,<0.2593.0>,nil,<<"1352829685701266">>, <0.2589.0>,<0.2594.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/22">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.560,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2658.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2658.0>,<0.2659.0>,nil,<<"1352829685715431">>, <0.2655.0>,<0.2660.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun, 
#Fun,1279,2558, true}, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2655.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2658.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 670 neighbours: [error_logger:error,2012-11-13T10:01:39.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2664.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2664.0>,<0.2665.0>,nil,<<"1352829685717309">>, <0.2661.0>,<0.2666.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2664.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 678 neighbours: [error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2604.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2604.0>,<0.2605.0>,nil,<<"1352829685703745">>, <0.2601.0>,<0.2606.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/24">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2604.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, 
cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 686 neighbours: [error_logger:error,2012-11-13T10:01:39.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2592.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 666 neighbours: [error_logger:error,2012-11-13T10:01:39.575,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2730.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2730.0>,<0.2731.0>,nil,<<"1352829685731704">>, <0.2727.0>,<0.2732.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/43">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2730.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 711 neighbours: [error_logger:info,2012-11-13T10:01:39.581,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3300.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.3300.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.583,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS 
REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3303.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.584,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3304.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3305.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.586,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3306.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.813,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3307.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.814,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3711.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.816,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3712.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.817,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3713.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.818,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3714.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
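The repeated "** Generic server ... terminating" reports above all follow the same OTP pattern: each couch_db process is linked to its parent (here <0.2486.0>) and runs with trap_exit enabled (note "trap_exit: true" in the crash reports), so when the parent is brutally killed, the exit reason 'killed' is delivered to the child as {'EXIT', Parent, killed} and gen_server shuts the child down with that reason. A minimal, self-contained sketch that reproduces the same report shape (a hypothetical demo module, not Couchbase code):

-module(kill_demo).
-behaviour(gen_server).
-export([start/0]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).

%% Spawn a parent that links to a gen_server child, then kill the parent.
start() ->
    Parent = spawn(fun() ->
                       {ok, _Child} = gen_server:start_link(?MODULE, [], []),
                       receive never -> ok end
                   end),
    timer:sleep(100),
    exit(Parent, kill).  %% parent dies with reason 'killed', which propagates

init([]) ->
    process_flag(trap_exit, true),  %% same as the couch_db processes above
    {ok, []}.

handle_call(_Request, _From, State) -> {reply, ok, State}.
handle_cast(_Msg, State) -> {noreply, State}.
handle_info(_Info, State) -> {noreply, State}.
%% gen_server intercepts {'EXIT', Parent, killed} before handle_info/2 and
%% calls terminate(killed, State), logging "** Generic server ... terminating".
terminate(_Reason, _State) -> ok.
code_change(_OldVsn, State, _Extra) -> {ok, State}.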
[error_logger:info,2012-11-13T10:01:39.819,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3717.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.821,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3718.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.822,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3302.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.824,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3720.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.825,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3721.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.826,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3723.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.827,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3725.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.828,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3727.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3728.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.831,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3730.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.832,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3732.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.833,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3734.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3751.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3719.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.3301.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:39.839,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{10,1,39}}}, {memory, [{total,31876640}, {processes,12139136}, {processes_used,11985520}, {system,19737504}, {atom,1397745}, {atom_used,1366130}, {binary,560080}, {code,13855624}, {ets,1928712}]}, {loaded, [capi_frontend,capi_spatial,qlc,mb_map,ns_rebalancer, ns_janitor_map_recoverer,stats_collector, couch_stats_reader,ns_vbm_sup,ns_vbm_new_sup, tap_replication_manager,mc_connection, 
capi_ddoc_replication_srv,capi_set_view_manager,mc_binary, single_bucket_sup,janitor_agent,mc_client_binary, ns_janitor,menelaus_web_remote_clusters,lib,mochinum, capi_utils,mochiweb_mime,mochiweb_io,mb_grid,mochijson2, set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, ns_config_isasl_sync,ns_config_replica,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 
'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,560}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, 
couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']}, {cookie,bptrojzpwfmfrqou}, {wordsize,8}, {wall_clock,326}] [ns_server:info,2012-11-13T10:01:39.851,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:error,2012-11-13T10:01:39.860,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.3714.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3754.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.865,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3755.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3756.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.867,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.3758.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.868,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3753.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.869,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.3770.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] 
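Each "started:" PROGRESS REPORT above is a supervisor echoing the child specification it just started: name, {M,F,A}, restart type, shutdown value, and child type. As a sketch, the mnesia_event entry maps onto a classic 6-tuple child spec inside a supervisor's init/1 callback like this (the restart strategy shown is illustrative, not taken from the mnesia source):

init([]) ->
    {ok, {{one_for_all, 0, 3600},
          [{mnesia_event,                   %% {name,mnesia_event}
            {mnesia_sup, start_event, []},  %% {mfargs,{mnesia_sup,start_event,[]}}
            permanent,                      %% {restart_type,permanent}
            30000,                          %% {shutdown,30000}, in milliseconds
            worker,                         %% {child_type,worker}
            [mnesia_event]}]}}.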
[error_logger:info,2012-11-13T10:01:39.870,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3772.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.872,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3773.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.873,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3774.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.874,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3775.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3776.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:40.586,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:440]Using existing disk schema on ['ns_1@127.0.0.1']. 
[ns_server:debug,2012-11-13T10:01:40.587,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-week' [ns_server:debug,2012-11-13T10:01:40.587,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-week' [ns_server:debug,2012-11-13T10:01:40.588,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-day' [ns_server:debug,2012-11-13T10:01:40.588,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-day' [error_logger:info,2012-11-13T10:01:40.588,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3798.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:40.589,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-month' [ns_server:debug,2012-11-13T10:01:40.589,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-month' [error_logger:info,2012-11-13T10:01:40.589,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3799.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2012-11-13T10:01:40.590,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of local_config [ns_server:debug,2012-11-13T10:01:40.590,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-minute' [error_logger:info,2012-11-13T10:01:40.590,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3800.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:40.591,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-minute' [ns_server:debug,2012-11-13T10:01:40.592,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of cluster [error_logger:info,2012-11-13T10:01:40.592,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3801.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:40.592,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-year' [ns_server:debug,2012-11-13T10:01:40.592,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-year' [ns_server:debug,2012-11-13T10:01:40.593,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-default-hour' 
[ns_server:debug,2012-11-13T10:01:40.593,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:ensure_schema:450]Have local copy of 'stats_archiver-@system-hour' [error_logger:info,2012-11-13T10:01:40.593,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.3771.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.594,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [ns_server:debug,2012-11-13T10:01:40.832,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:init:268]Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,['stats_archiver-default-minute','stats_archiver-default-year', 'stats_archiver-default-week','stats_archiver-default-month', 'stats_archiver-default-day','stats_archiver-default-hour', 'stats_archiver-@system-year','stats_archiver-@system-week', 'stats_archiver-@system-month', 'stats_archiver-@system-minute','stats_archiver-@system-day', local_config,'stats_archiver-@system-hour',cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{8,0}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<0.3770.0>,<0.3759.0>]}, {tables,['stats_archiver-default-week','stats_archiver-@system-week', 'stats_archiver-default-day','stats_archiver-@system-day', 'stats_archiver-default-month','stats_archiver-@system-month', local_config,'stats_archiver-@system-minute', 'stats_archiver-default-minute',cluster, 'stats_archiver-default-year','stats_archiver-@system-year', 'stats_archiver-default-hour','stats_archiver-@system-hour',schema]}, {transaction_commits,2}, {transaction_failures,14}, {transaction_log_writes,0}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.5"}] Peers: ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T10:01:40.834,ns_1@127.0.0.1:ns_config_sup<0.3873.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:40.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.3759.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] 
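The "Current config" dump that mb_mnesia logs above has the same shape as the proplist returned by mnesia:system_info(all), so individual entries can be cross-checked from an Erlang shell attached to the node; a sketch, assuming mnesia is running:

1> mnesia:system_info(is_running).
yes
2> mnesia:system_info(running_db_nodes).
['ns_1@127.0.0.1']
3> proplists:get_value(directory, mnesia:system_info(all)).
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/mnesia"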
[error_logger:info,2012-11-13T10:01:40.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3757.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3874.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:40.838,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:40.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3875.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:40.839,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2012-11-13T10:01:40.839,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:695]Here's full dynamic config we loaded: [[{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,2391}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}]] [ns_server:info,2012-11-13T10:01:40.851,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,2391}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}] [ns_server:debug,2012-11-13T10:01:40.864,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:init:53]isasl_sync init: ["/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw", "_admin","_admin"] [error_logger:info,2012-11-13T10:01:40.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3876.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:40.864,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:init:61]isasl_sync init buckets: ["default"] [ns_server:debug,2012-11-13T10:01:40.865,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [error_logger:info,2012-11-13T10:01:40.865,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3878.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:40.866,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[error_logger:info,2012-11-13T10:01:41.867,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3879.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.868,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3882.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.870,ns_1@127.0.0.1:ns_node_disco<0.3894.0>:ns_node_disco:init:103]Initting ns_node_disco with []
[error_logger:info,2012-11-13T10:01:41.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3884.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.871,ns_1@127.0.0.1:ns_cookie_manager<0.3755.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync
[ns_server:debug,2012-11-13T10:01:41.872,ns_1@127.0.0.1:<0.3895.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[error_logger:info,2012-11-13T10:01:41.872,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3873.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:01:41.872,ns_1@127.0.0.1:<0.3895.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[ns_server:debug,2012-11-13T10:01:41.873,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:init:66]init pulling
[ns_server:info,2012-11-13T10:01:41.873,ns_1@127.0.0.1:ns_node_disco_events<0.3893.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1']
[ns_server:debug,2012-11-13T10:01:41.873,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:init:68]init pushing
[error_logger:info,2012-11-13T10:01:41.873,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3886.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.875,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3888.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.875,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:init:72]init reannouncing
[ns_server:debug,2012-11-13T10:01:41.875,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted
[ns_server:debug,2012-11-13T10:01:41.875,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]
[error_logger:info,2012-11-13T10:01:41.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3889.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.876,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp
[ns_server:debug,2012-11-13T10:01:41.876,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([auto_failover_cfg,autocompaction,buckets, cluster_compat_version,dynamic_config_version, email_alerts]..)
[ns_server:debug,2012-11-13T10:01:41.876,ns_1@127.0.0.1:ns_cookie_manager<0.3755.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync
[ns_server:debug,2012-11-13T10:01:41.876,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:01:41.877,ns_1@127.0.0.1:<0.3904.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[error_logger:info,2012-11-13T10:01:41.878,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3890.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.878,ns_1@127.0.0.1:ns_log_events<0.3890.0>:ns_mail_log:init:44]ns_mail_log started up
[ns_server:debug,2012-11-13T10:01:41.879,ns_1@127.0.0.1:ns_cookie_manager<0.3755.0>:ns_cookie_manager:do_cookie_sync:115]ns_cookie_manager do_cookie_sync
[ns_server:debug,2012-11-13T10:01:41.879,ns_1@127.0.0.1:<0.3904.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[ns_server:debug,2012-11-13T10:01:41.879,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:current_status:140]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}}
[ns_server:debug,2012-11-13T10:01:41.880,ns_1@127.0.0.1:<0.3914.0>:ns_node_disco:do_nodes_wanted_updated_fun:202]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[ns_server:debug,2012-11-13T10:01:41.880,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[[{map,[{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]}, {4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]},
{8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]}, {12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]},
{16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]}, {20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]},
{24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]}, {28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]},
{32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]}, {36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]},
{40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]}, {44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]},
{48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]}, {52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]},
{56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]}, {60,[],['ns_1@127.0.0.1',undefined]}, {61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}]}, {fastForwardMap,[]}, {uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}]]}]
[error_logger:info,2012-11-13T10:01:41.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3893.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.881,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@127.0.0.1'}, {latest,minute}]}}}
[ns_server:debug,2012-11-13T10:01:41.881,ns_1@127.0.0.1:<0.3914.0>:ns_node_disco:do_nodes_wanted_updated_fun:208]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: bptrojzpwfmfrqou
[ns_server:info,2012-11-13T10:01:41.881,ns_1@127.0.0.1:remote_clusters_info<0.3919.0>:remote_clusters_info:read_or_create_table:384]Reading remote_clusters_info content from /Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/remote_clusters_cache
[ns_server:debug,2012-11-13T10:01:41.883,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: cluster_compat_version -> [2,0]
[ns_server:debug,2012-11-13T10:01:41.884,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:grab_local_xdcr_replications:307]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:debug,2012-11-13T10:01:41.884,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: dynamic_config_version -> [2,0]
[error_logger:info,2012-11-13T10:01:41.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3894.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]
[ns_server:error,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:debug,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]
[ns_server:debug,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:mb_master:check_master_takeover_needed:144]Sending master node question to the following nodes: []
[error_logger:info,2012-11-13T10:01:41.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3897.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.887,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: index_aware_rebalance_disabled -> false
[ns_server:debug,2012-11-13T10:01:41.887,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:mb_master:check_master_takeover_needed:146]Got replies: []
[ns_server:debug,2012-11-13T10:01:41.887,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: max_bucket_count -> 10
[ns_server:debug,2012-11-13T10:01:41.888,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391
[ns_server:debug,2012-11-13T10:01:41.888,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config
change: nodes_wanted -> ['ns_1@127.0.0.1']
[ns_server:debug,2012-11-13T10:01:41.888,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:mb_master:check_master_takeover_needed:152]Was unable to discover master, not going to force mastership takeover
[error_logger:info,2012-11-13T10:01:41.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3898.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.889,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: otp -> [{cookie,bptrojzpwfmfrqou}]
[ns_server:debug,2012-11-13T10:01:41.889,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: remote_clusters -> []
[user:info,2012-11-13T10:01:41.889,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:init:89]I'm the only node, so I'm the master.
[ns_server:debug,2012-11-13T10:01:41.889,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: replication -> [{enabled,true}]
[ns_server:debug,2012-11-13T10:01:41.890,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: rest -> [{port,8091}]
[ns_server:debug,2012-11-13T10:01:41.890,ns_1@127.0.0.1:mb_master_sup<0.3928.0>:misc:start_singleton:855]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.3929.0> on 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T10:01:41.890,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3899.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:info,2012-11-13T10:01:41.891,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:handle_info:57]config change: rest_creds -> ********
[ns_server:debug,2012-11-13T10:01:41.891,ns_1@127.0.0.1:<0.3933.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}}
[ns_server:debug,2012-11-13T10:01:41.891,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]
[ns_server:debug,2012-11-13T10:01:41.891,ns_1@127.0.0.1:mb_master_sup<0.3928.0>:misc:start_singleton:855]start_singleton(gen_server, ns_tick, [], []): started as <0.3934.0> on 'ns_1@127.0.0.1'
[ns_server:debug,2012-11-13T10:01:41.892,ns_1@127.0.0.1:<0.3933.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "default" on 'ns_1@127.0.0.1'
[ns_server:debug,2012-11-13T10:01:41.892,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: settings -> [{stats,[{send_stats,false}]}]
[error_logger:info,2012-11-13T10:01:41.892,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3900.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000},
{child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.892,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: uuid -> <<"b34a9c2e03786d913446a4e84919e1d5">>
[ns_server:debug,2012-11-13T10:01:41.893,ns_1@127.0.0.1:<0.3935.0>:auto_failover:init:120]init auto_failover.
[ns_server:debug,2012-11-13T10:01:41.893,ns_1@127.0.0.1:mb_master_sup<0.3928.0>:misc:start_singleton:855]start_singleton(gen_server, auto_failover, [], []): started as <0.3935.0> on 'ns_1@127.0.0.1'
[ns_server:debug,2012-11-13T10:01:41.893,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: vbucket_map_history -> [{[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]
[error_logger:info,2012-11-13T10:01:41.894,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3892.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:01:41.895,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_capi_checkpoint_timeout -> 10
[ns_server:debug,2012-11-13T10:01:41.896,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_checkpoint_interval -> 1800
[ns_server:debug,2012-11-13T10:01:41.896,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_connection_timeout -> 60
[ns_server:debug,2012-11-13T10:01:41.897,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_doc_batch_size_kb -> 512
[user:info,2012-11-13T10:01:41.897,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
[ns_server:debug,2012-11-13T10:01:41.897,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_failure_restart_interval -> 30
[ns_server:debug,2012-11-13T10:01:41.897,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_num_http_connections -> 20
[ns_server:debug,2012-11-13T10:01:41.898,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_num_retries_per_request -> 2
[error_logger:info,2012-11-13T10:01:41.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3903.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:info,2012-11-13T10:01:41.899,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213
[ns_server:debug,2012-11-13T10:01:41.899,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_num_worker_process -> 4
[ns_server:debug,2012-11-13T10:01:41.900,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: xdcr_worker_batch_size -> 100
[ns_server:debug,2012-11-13T10:01:41.900,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_indexers} -> 4
[ns_server:debug,2012-11-13T10:01:41.901,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {couchdb,max_parallel_replica_indexers} -> 2
[ns_server:debug,2012-11-13T10:01:41.901,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092
[ns_server:debug,2012-11-13T10:01:41.901,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}]
[ns_server:debug,2012-11-13T10:01:41.902,ns_1@127.0.0.1:<0.3969.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000
[ns_server:debug,2012-11-13T10:01:41.902,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,0}
[error_logger:info,2012-11-13T10:01:41.902,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3907.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.902,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]
[ns_server:debug,2012-11-13T10:01:41.903,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',membership} -> active
[ns_server:debug,2012-11-13T10:01:41.903,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]
[ns_server:debug,2012-11-13T10:01:41.904,ns_1@127.0.0.1:<0.3971.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000
[ns_server:debug,2012-11-13T10:01:41.905,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}]
[ns_server:debug,2012-11-13T10:01:41.906,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]
[error_logger:info,2012-11-13T10:01:41.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3908.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.908,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X",
{"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:info,2012-11-13T10:01:41.908,ns_1@127.0.0.1:<0.3974.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [ns_server:debug,2012-11-13T10:01:41.911,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [error_logger:info,2012-11-13T10:01:41.912,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3909.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.3911.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.923,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3910.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:41.925,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3912.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2012-11-13T10:01:41.926,ns_1@127.0.0.1:ns_bucket_worker<0.3977.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent,infinity,supervisor, [single_bucket_sup]} [ns_server:debug,2012-11-13T10:01:41.928,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:init:218]Usable vbuckets: [48,32,16,0,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2, 18,53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55, 39,23,58,42,26,10,61,45,29,13] [ns_server:debug,2012-11-13T10:01:41.931,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing 
replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:01:41.931,ns_1@127.0.0.1:<0.4000.0>:supervisor_cushion:init:43]starting compaction_daemon with delay of 3000
[error_logger:info,2012-11-13T10:01:41.931,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3913.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.932,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[error_logger:info,2012-11-13T10:01:41.933,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3916.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.936,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.4004.0>:xdc_rdoc_replication_srv:handle_info:132]doing replicate_newnodes_docs
[ns_server:info,2012-11-13T10:01:41.936,ns_1@127.0.0.1:set_view_update_daemon<0.4007.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000
[error_logger:info,2012-11-13T10:01:41.936,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3919.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:warn,2012-11-13T10:01:41.938,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying.
[ns_server:debug,2012-11-13T10:01:41.938,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@127.0.0.1'}, {latest,minute}]}}}
[error_logger:info,2012-11-13T10:01:41.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3929.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}]
[ns_server:info,2012-11-13T10:01:41.934,ns_1@127.0.0.1:<0.4005.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[error_logger:info,2012-11-13T10:01:41.946,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3934.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.948,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3935.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.949,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3926.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:41.951,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3936.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3937.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.955,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3938.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.957,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3939.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill},
{child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.958,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:01:41.960,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([memory_quota]..)
[ns_server:debug,2012-11-13T10:01:41.960,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: memory_quota -> 2391
[ns_server:info,2012-11-13T10:01:41.961,ns_1@127.0.0.1:<0.4005.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[error_logger:info,2012-11-13T10:01:41.961,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3940.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.961,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[error_logger:info,2012-11-13T10:01:41.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3943.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3960.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.971,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3961.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:41.973,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3962.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:41.985,ns_1@127.0.0.1:<0.4018.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1067950
[error_logger:info,2012-11-13T10:01:41.986,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3942.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:01:41.998,ns_1@127.0.0.1:<0.4018.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:01:42.007,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[error_logger:info,2012-11-13T10:01:42.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3964.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:01:42.012,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 29s
[error_logger:info,2012-11-13T10:01:42.013,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3965.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.018,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3966.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3963.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3968.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.025,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3969.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z",
"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.036,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3971.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.039,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3967.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.041,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3973.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.042,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3974.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.043,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3977.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.044,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3979.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.045,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3980.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.046,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3984.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.048,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3983.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.049,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3986.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.050,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.3985.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.052,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.3994.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.053,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.3996.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.055,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.3997.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.056,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.3998.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.057,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.3988.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.059,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.4000.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.060,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.4004.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.066,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.4007.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
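The progress reports above (and the two that follow) enumerate the children that ns_server_cluster_sup, ns_server_sup, ns_bucket_sup and 'single_bucket_sup-default' report as started at boot. When reading a flattened diag dump like this one, it can help to recover that supervision tree mechanically; the sketch below is a hypothetical helper, not a Couchbase tool — the regex and the function name supervision_tree are assumptions made for illustration, matched against the report layout shown here.

import re
from collections import defaultdict

# Hypothetical helper (not part of the diag): map each supervisor named in an
# ale PROGRESS REPORT to the {name, pid} pairs of the children it started.
REPORT = re.compile(
    r"PROGRESS REPORT=+\s*"
    r"supervisor:\s*\{local,(?P<sup>[^}]+)\}\s*"
    r"started:\s*\[\{pid,(?P<pid><[^>]+>)\},\s*\{name,\s*(?P<name>.+?)\},\s*\{mfargs",
    re.DOTALL,
)

def supervision_tree(log_text):
    tree = defaultdict(list)
    for match in REPORT.finditer(log_text):
        tree[match.group("sup")].append((match.group("name"), match.group("pid")))
    return dict(tree)

Run over this section's text, it would yield entries such as {'ns_server_sup': [('ns_port_sup', '<0.3967.0>'), ('ns_port_memcached_killer', '<0.3973.0>'), ...], ...}, which is often all one needs to see which supervisor a crashing child hangs off.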
[error_logger:info,2012-11-13T10:01:42.071,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.4011.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.073,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.3887.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:01:42.893,ns_1@127.0.0.1:<0.3933.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}}
[ns_server:debug,2012-11-13T10:01:42.893,ns_1@127.0.0.1:<0.3933.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "default" on 'ns_1@127.0.0.1'
[ns_server:debug,2012-11-13T10:01:42.952,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:accept_loop:31]Got new connection
[ns_server:debug,2012-11-13T10:01:42.953,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.4019.0>
[ns_server:info,2012-11-13T10:01:42.961,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;"
[error_logger:info,2012-11-13T10:01:42.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4001.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.966,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4024.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4027.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:info,2012-11-13T10:01:42.968,ns_1@127.0.0.1:janitor_agent-default<0.4029.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed:
{error,enoent}. Assuming it's equal to global config.
[ns_server:info,2012-11-13T10:01:42.969,ns_1@127.0.0.1:janitor_agent-default<0.4029.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config
[error_logger:info,2012-11-13T10:01:42.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4028.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:42.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4029.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.979,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4030.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[views:debug,2012-11-13T10:01:42.980,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:42.981,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",63,dead,1}
[error_logger:info,2012-11-13T10:01:42.981,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4031.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4033.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:42.984,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,'single_bucket_sup-default'}
started: [{pid,<0.4035.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[views:debug,2012-11-13T10:01:42.984,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62.
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:42.984,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",62,dead,1} [error_logger:info,2012-11-13T10:01:42.985,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4036.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [views:debug,2012-11-13T10:01:42.987,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:42.987,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",61,dead,1} [views:debug,2012-11-13T10:01:42.989,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:42.990,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",60,dead,1} [views:debug,2012-11-13T10:01:42.994,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:42.994,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",59,dead,1} [views:debug,2012-11-13T10:01:42.998,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:42.998,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",58,dead,1} [views:debug,2012-11-13T10:01:43.001,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.001,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",57,dead,1} [views:debug,2012-11-13T10:01:43.004,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.004,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",56,dead,1} [views:debug,2012-11-13T10:01:43.007,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.008,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",55,dead,1} [views:debug,2012-11-13T10:01:43.010,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. 
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.011,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",54,dead,1} [views:debug,2012-11-13T10:01:43.024,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.024,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",53,dead,1} [views:debug,2012-11-13T10:01:43.026,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.027,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",52,dead,1} [views:debug,2012-11-13T10:01:43.030,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.030,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",51,dead,1} [views:debug,2012-11-13T10:01:43.033,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.033,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",50,dead,1} [views:debug,2012-11-13T10:01:43.036,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.036,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",49,dead,1} [views:debug,2012-11-13T10:01:43.038,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.039,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",48,dead,1} [views:debug,2012-11-13T10:01:43.041,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.041,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",47,dead,1} [views:debug,2012-11-13T10:01:43.044,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.044,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",46,dead,1} [views:debug,2012-11-13T10:01:43.046,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. 
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.047,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",45,dead,1} [views:debug,2012-11-13T10:01:43.049,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.050,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",44,dead,1} [views:debug,2012-11-13T10:01:43.052,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.053,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",43,dead,1} [views:debug,2012-11-13T10:01:43.055,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.055,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",42,dead,1} [views:debug,2012-11-13T10:01:43.059,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.060,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",41,dead,1} [views:debug,2012-11-13T10:01:43.063,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.063,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",40,dead,1} [views:debug,2012-11-13T10:01:43.066,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.066,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",39,dead,1} [views:debug,2012-11-13T10:01:43.068,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.069,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",38,dead,1} [views:debug,2012-11-13T10:01:43.071,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.071,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",37,dead,1} [views:debug,2012-11-13T10:01:43.074,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. 
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.074,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",36,dead,1} [views:debug,2012-11-13T10:01:43.076,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.077,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",35,dead,1} [views:debug,2012-11-13T10:01:43.079,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.080,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",34,dead,1} [views:debug,2012-11-13T10:01:43.083,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.084,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",33,dead,1} [views:debug,2012-11-13T10:01:43.086,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.086,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",32,dead,1} [views:debug,2012-11-13T10:01:43.088,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.089,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",31,dead,1} [views:debug,2012-11-13T10:01:43.091,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.092,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",30,dead,1} [views:debug,2012-11-13T10:01:43.096,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.097,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",29,dead,1} [views:debug,2012-11-13T10:01:43.100,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.101,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",28,dead,1} [views:debug,2012-11-13T10:01:43.103,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. 
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.104,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",27,dead,1} [views:debug,2012-11-13T10:01:43.106,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.106,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",26,dead,1} [views:debug,2012-11-13T10:01:43.109,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.109,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",25,dead,1} [views:debug,2012-11-13T10:01:43.112,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.112,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",24,dead,1} [views:debug,2012-11-13T10:01:43.115,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.115,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",23,dead,1} [views:debug,2012-11-13T10:01:43.118,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.118,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",22,dead,1} [views:debug,2012-11-13T10:01:43.120,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.121,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",21,dead,1} [views:debug,2012-11-13T10:01:43.123,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.124,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",20,dead,1} [views:debug,2012-11-13T10:01:43.126,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.126,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",19,dead,1} [views:debug,2012-11-13T10:01:43.129,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. 
Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.129,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",18,dead,1}
[views:debug,2012-11-13T10:01:43.131,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.132,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",17,dead,1}
[views:debug,2012-11-13T10:01:43.134,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.134,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",16,dead,1}
[views:debug,2012-11-13T10:01:43.137,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.137,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",15,dead,1}
[views:debug,2012-11-13T10:01:43.140,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.141,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",14,dead,1}
[views:debug,2012-11-13T10:01:43.144,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.145,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",13,dead,1}
[views:debug,2012-11-13T10:01:43.148,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.148,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",12,dead,1}
[views:debug,2012-11-13T10:01:43.151,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: dead (1)
[ns_server:info,2012-11-13T10:01:43.151,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:01:42.951086 PST 3: Trying to connect to mccouch: "localhost:11213"
memcached<0.3972.0>: Tue Nov 13 10:01:42.952552 PST 3: Connected to mccouch: "localhost:11213"
memcached<0.3972.0>: Tue Nov 13 10:01:42.960960 PST 3: Extension support isn't implemented in this version of bucket_engine
memcached<0.3972.0>: Tue Nov 13 10:01:42.965552 PST 3: Failed to load mutation log, falling back to key dump
memcached<0.3972.0>: Tue Nov 13 10:01:42.972721 PST 3: metadata loaded in 18 ms
memcached<0.3972.0>: Tue Nov 13 10:01:42.976599 PST 3: warmup completed in 21 ms
[ns_server:debug,2012-11-13T10:01:43.151,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",11,dead,1}
[views:debug,2012-11-13T10:01:43.154,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10.
Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.154,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",10,dead,1} [views:debug,2012-11-13T10:01:43.156,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.157,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",9,dead,1} [views:debug,2012-11-13T10:01:43.159,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.160,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",8,dead,1} [views:debug,2012-11-13T10:01:43.162,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.163,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",7,dead,1} [views:debug,2012-11-13T10:01:43.166,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.167,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",6,dead,1} [views:debug,2012-11-13T10:01:43.169,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.169,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",5,dead,1} [views:debug,2012-11-13T10:01:43.172,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.172,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",4,dead,1} [views:debug,2012-11-13T10:01:43.175,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.175,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",3,dead,1} [views:debug,2012-11-13T10:01:43.178,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.179,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",2,dead,1} [views:debug,2012-11-13T10:01:43.184,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: dead (1) [ns_server:debug,2012-11-13T10:01:43.185,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",1,dead,1} [views:debug,2012-11-13T10:01:43.188,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. 
Updated state: dead (1)
[ns_server:debug,2012-11-13T10:01:43.188,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",0,dead,1}
[ns_server:debug,2012-11-13T10:01:43.432,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[user:info,2012-11-13T10:01:43.433,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
[ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 0 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 1 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 2 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 3 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 4 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 5 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 6 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 7 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 8 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.903,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 9 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.903,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 10 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 11 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 12 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 13 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 14 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 15 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 16 in "default" on 'ns_1@127.0.0.1' from dead to active.
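At this point every vbucket has been announced dead during warmup, and the janitor's do_sanify_chain pass (which continues below through vbucket 63) flips each one to active, with capi_set_view_manager echoing a set_vbucket event per transition. A quick sanity check on a dump like this is to tally those events and confirm that all 64 vbuckets (max_vbuckets=64 in the bucket config above) end up active. The snippet below is an illustrative sketch under that assumption, not a Couchbase tool; vbucket_history is a made-up name.

import re
from collections import defaultdict

# Illustrative sketch: collect each vbucket's state history from the
# "Got set_vbucket event for <bucket>/<vb>. Updated state: <state>" lines.
EVENT = re.compile(
    r"Got set_vbucket event for (\w+)/(\d+)\.\s*Updated state:\s*(\w+)")

def vbucket_history(log_text):
    history = defaultdict(list)
    for bucket, vb, state in EVENT.findall(log_text):
        history[(bucket, int(vb))].append(state)
    return history

# After a clean warmup one would expect 64 entries for "default",
# each history ending in "active":
#   hist = vbucket_history(text)
#   assert all(states[-1] == "active" for states in hist.values())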
[ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 17 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 18 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 19 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 20 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 21 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 22 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 23 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 24 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 25 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 26 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 27 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 28 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 29 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 30 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 31 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 32 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 33 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 34 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 35 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 36 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 37 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 38 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 39 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 40 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 41 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 42 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 43 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 44 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 45 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 46 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 47 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 48 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 49 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 50 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 51 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 52 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 53 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 54 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 55 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 56 in "default" on 'ns_1@127.0.0.1' from dead to active. 
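The janitor run continues below with vbuckets 57 through 63. For reference while reading it: the ensure_bucket record earlier shows how ns_server hands ep-engine its settings as a single semicolon-separated string (ht_size=3079;...;max_vbuckets=64;...). A minimal, hypothetical parser for that format, useful when comparing config strings across diags:

# Minimal sketch (assumed helper, not from Couchbase): turn ep-engine's
# "key=value;key=value;..." config string, as logged by
# ns_memcached:ensure_bucket, into a dict.
def parse_engine_config(cfg):
    return dict(
        item.split("=", 1)            # split only on the first '='
        for item in cfg.rstrip(";").split(";")
        if item
    )

cfg = "ht_size=3079;ht_locks=5;max_size=2507145216;max_vbuckets=64;vb0=false;"
assert parse_engine_config(cfg)["max_vbuckets"] == "64"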
[ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 57 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 58 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 59 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 60 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 61 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 62 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 63 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.916,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active [ns_server:info,2012-11-13T10:01:43.917,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active [ns_server:info,2012-11-13T10:01:43.917,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active [ns_server:info,2012-11-13T10:01:43.918,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active [ns_server:info,2012-11-13T10:01:43.918,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active [views:debug,2012-11-13T10:01:43.919,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (1) [ns_server:info,2012-11-13T10:01:43.919,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active [ns_server:debug,2012-11-13T10:01:43.919,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",63,active,1} [ns_server:info,2012-11-13T10:01:43.920,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active [ns_server:info,2012-11-13T10:01:43.920,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active [ns_server:info,2012-11-13T10:01:43.921,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active [ns_server:info,2012-11-13T10:01:43.921,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active [ns_server:info,2012-11-13T10:01:43.922,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active [views:debug,2012-11-13T10:01:43.922,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. 
Updated state: active (1) [ns_server:info,2012-11-13T10:01:43.922,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active [ns_server:debug,2012-11-13T10:01:43.922,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",62,active,1} [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active [ns_server:info,2012-11-13T10:01:43.924,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active [ns_server:info,2012-11-13T10:01:43.924,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active [views:debug,2012-11-13T10:01:43.924,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: active (1) [ns_server:info,2012-11-13T10:01:43.925,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active [ns_server:debug,2012-11-13T10:01:43.925,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",61,active,1} [ns_server:info,2012-11-13T10:01:43.925,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active [ns_server:info,2012-11-13T10:01:43.927,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active [ns_server:info,2012-11-13T10:01:43.927,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active [views:debug,2012-11-13T10:01:43.927,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: active (1) [ns_server:debug,2012-11-13T10:01:43.928,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",60,active,1} [ns_server:info,2012-11-13T10:01:43.928,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active [ns_server:info,2012-11-13T10:01:43.929,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active [ns_server:info,2012-11-13T10:01:43.930,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active [ns_server:info,2012-11-13T10:01:43.931,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active [views:debug,2012-11-13T10:01:43.931,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. 
Updated state: active (1) [ns_server:debug,2012-11-13T10:01:43.932,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",59,active,1} [ns_server:info,2012-11-13T10:01:43.932,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active [ns_server:info,2012-11-13T10:01:43.933,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active [ns_server:info,2012-11-13T10:01:43.934,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active [ns_server:info,2012-11-13T10:01:43.935,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active [views:debug,2012-11-13T10:01:43.935,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: active (1) [ns_server:info,2012-11-13T10:01:43.936,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active [ns_server:debug,2012-11-13T10:01:43.936,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",58,active,1} [ns_server:info,2012-11-13T10:01:43.936,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active [ns_server:info,2012-11-13T10:01:43.937,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active [ns_server:info,2012-11-13T10:01:43.937,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active [ns_server:info,2012-11-13T10:01:43.938,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active [ns_server:info,2012-11-13T10:01:43.939,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active [views:debug,2012-11-13T10:01:43.939,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: active (1) [ns_server:info,2012-11-13T10:01:43.940,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active [ns_server:debug,2012-11-13T10:01:43.940,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",57,active,1} [ns_server:info,2012-11-13T10:01:43.941,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active [ns_server:info,2012-11-13T10:01:43.941,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active [ns_server:info,2012-11-13T10:01:43.942,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active [ns_server:info,2012-11-13T10:01:43.943,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active [views:debug,2012-11-13T10:01:43.943,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. 
Updated state: active (1)
[ns_server:info,2012-11-13T10:01:43.943,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active
[ns_server:debug,2012-11-13T10:01:43.944,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",56,active,1}
[ns_server:info,2012-11-13T10:01:43.945,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active
[ns_server:info,2012-11-13T10:01:43.946,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active
[ns_server:info,2012-11-13T10:01:43.947,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active
[views:debug,2012-11-13T10:01:43.947,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:43.947,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",55,active,1}
[ns_server:info,2012-11-13T10:01:43.948,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active
[ns_server:info,2012-11-13T10:01:43.949,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active
[ns_server:info,2012-11-13T10:01:43.949,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active
[ns_server:info,2012-11-13T10:01:43.950,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active
[views:debug,2012-11-13T10:01:43.950,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:43.951,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",54,active,1}
[ns_server:info,2012-11-13T10:01:43.952,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active
[ns_server:info,2012-11-13T10:01:43.953,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active
[ns_server:info,2012-11-13T10:01:43.954,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active
[ns_server:info,2012-11-13T10:01:43.954,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active
[views:debug,2012-11-13T10:01:43.954,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:43.955,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",53,active,1}
[ns_server:info,2012-11-13T10:01:43.956,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active
[ns_server:info,2012-11-13T10:01:43.957,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active
[ns_server:info,2012-11-13T10:01:43.958,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active
[ns_server:info,2012-11-13T10:01:43.959,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active
[views:debug,2012-11-13T10:01:43.959,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:43.959,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",52,active,1}
[ns_server:info,2012-11-13T10:01:43.960,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active
[ns_server:info,2012-11-13T10:01:43.960,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active
[ns_server:info,2012-11-13T10:01:43.962,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active
[views:debug,2012-11-13T10:01:43.963,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: active (1)
[ns_server:info,2012-11-13T10:01:43.963,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active
[ns_server:debug,2012-11-13T10:01:43.963,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",51,active,1}
[ns_server:info,2012-11-13T10:01:43.964,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active
[ns_server:info,2012-11-13T10:01:43.965,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default"
[ns_server:info,2012-11-13T10:01:43.966,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 1 seconds
[views:debug,2012-11-13T10:01:43.966,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:43.966,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",50,active,1}
[views:debug,2012-11-13T10:01:44.034,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.034,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",49,active,1}
[views:debug,2012-11-13T10:01:44.118,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.118,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",48,active,1}
[views:debug,2012-11-13T10:01:44.127,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.127,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",47,active,1}
[views:debug,2012-11-13T10:01:44.130,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.131,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",46,active,1}
[views:debug,2012-11-13T10:01:44.133,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.133,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",45,active,1}
[views:debug,2012-11-13T10:01:44.136,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.137,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",44,active,1}
[views:debug,2012-11-13T10:01:44.140,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.140,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",43,active,1}
[views:debug,2012-11-13T10:01:44.143,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.143,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",42,active,1}
[views:debug,2012-11-13T10:01:44.146,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.146,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",41,active,1}
[views:debug,2012-11-13T10:01:44.149,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.149,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",40,active,1}
[views:debug,2012-11-13T10:01:44.152,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.152,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",39,active,1}
[views:debug,2012-11-13T10:01:44.154,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.155,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",38,active,1}
[views:debug,2012-11-13T10:01:44.157,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.158,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",37,active,1}
[views:debug,2012-11-13T10:01:44.160,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.160,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",36,active,1}
[views:debug,2012-11-13T10:01:44.163,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.163,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",35,active,1}
[views:debug,2012-11-13T10:01:44.171,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.171,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",34,active,1}
[views:debug,2012-11-13T10:01:44.174,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.174,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",33,active,1}
[views:debug,2012-11-13T10:01:44.178,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.179,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",32,active,1}
[views:debug,2012-11-13T10:01:44.181,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.182,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",31,active,1}
[views:debug,2012-11-13T10:01:44.184,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.184,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",30,active,1}
[views:debug,2012-11-13T10:01:44.193,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.193,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",29,active,1}
[views:debug,2012-11-13T10:01:44.204,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.205,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",28,active,1}
[views:debug,2012-11-13T10:01:44.216,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.216,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",27,active,1}
[views:debug,2012-11-13T10:01:44.225,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.226,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",26,active,1}
[views:debug,2012-11-13T10:01:44.228,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.229,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",25,active,1}
[views:debug,2012-11-13T10:01:44.238,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.239,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",24,active,1}
[views:debug,2012-11-13T10:01:44.249,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.249,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",23,active,1}
[views:debug,2012-11-13T10:01:44.252,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.253,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",22,active,1}
[views:debug,2012-11-13T10:01:44.256,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.256,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",21,active,1}
[views:debug,2012-11-13T10:01:44.260,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.261,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",20,active,1}
[views:debug,2012-11-13T10:01:44.264,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.265,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",19,active,1}
[views:debug,2012-11-13T10:01:44.269,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.269,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",18,active,1}
[views:debug,2012-11-13T10:01:44.273,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.274,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",17,active,1}
[views:debug,2012-11-13T10:01:44.276,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.277,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",16,active,1}
[views:debug,2012-11-13T10:01:44.280,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.281,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",15,active,1}
[views:debug,2012-11-13T10:01:44.283,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.283,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",14,active,1}
[views:debug,2012-11-13T10:01:44.285,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.286,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",13,active,1}
[views:debug,2012-11-13T10:01:44.289,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.289,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",12,active,1}
[views:debug,2012-11-13T10:01:44.292,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.293,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",11,active,1}
[views:debug,2012-11-13T10:01:44.298,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.299,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",10,active,1}
[views:debug,2012-11-13T10:01:44.301,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.302,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",9,active,1}
[views:debug,2012-11-13T10:01:44.305,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.305,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",8,active,1}
[views:debug,2012-11-13T10:01:44.308,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.308,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",7,active,1}
[views:debug,2012-11-13T10:01:44.314,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.314,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",6,active,1}
[views:debug,2012-11-13T10:01:44.317,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.317,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",5,active,1}
[views:debug,2012-11-13T10:01:44.321,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.321,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",4,active,1}
[views:debug,2012-11-13T10:01:44.324,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.324,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",3,active,1}
[views:debug,2012-11-13T10:01:44.327,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.327,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",2,active,1}
[views:debug,2012-11-13T10:01:44.330,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.330,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",1,active,1}
[views:debug,2012-11-13T10:01:44.333,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: active (1)
[ns_server:debug,2012-11-13T10:01:44.334,ns_1@127.0.0.1:<0.4019.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"default",0,active,1}
[ns_server:info,2012-11-13T10:01:46.906,ns_1@127.0.0.1:ns_doctor<0.3916.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"]
[ns_server:debug,2012-11-13T10:02:11.015,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:02:11.016,ns_1@127.0.0.1:<0.4165.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:02:11.017,ns_1@127.0.0.1:<0.4165.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:02:11.025,ns_1@127.0.0.1:<0.4168.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:02:11.026,ns_1@127.0.0.1:<0.4168.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:02:11.026,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:02:11.026,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:warn,2012-11-13T10:02:50.917,ns_1@127.0.0.1:menelaus_web_alerts_srv<0.3962.0>:menelaus_web_alerts_srv:handle_info:172]Eaten 5 previously unconsumed check_alerts
[ns_server:warn,2012-11-13T10:02:50.918,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 7 heartbeats
[ns_server:debug,2012-11-13T10:02:50.918,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[stats:warn,2012-11-13T10:02:50.920,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 16 ticks
[ns_server:info,2012-11-13T10:02:50.922,ns_1@127.0.0.1:<0.4290.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:warn,2012-11-13T10:03:12.532,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 1 heartbeats
[ns_server:info,2012-11-13T10:03:12.532,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>}
[ns_server:error,2012-11-13T10:03:12.532,ns_1@127.0.0.1:<0.4284.0>:ns_janitor:cleanup_with_states:92]Bucket "default" not yet ready on ['ns_1@127.0.0.1']
[stats:warn,2012-11-13T10:03:12.533,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 15 ticks
[ns_server:info,2012-11-13T10:03:12.533,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>}
[ns_server:warn,2012-11-13T10:03:12.533,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 8 heartbeats
[ns_server:info,2012-11-13T10:03:12.533,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>}
[stats:warn,2012-11-13T10:03:14.782,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 6 ticks
[ns_server:error,2012-11-13T10:03:15.007,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_info:630]handle_info(ensure_bucket,..) took too long: 24090341 us
[ns_server:info,2012-11-13T10:03:15.407,ns_1@127.0.0.1:<0.4290.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[stats:warn,2012-11-13T10:03:15.410,ns_1@127.0.0.1:<0.4031.0>:stats_collector:latest_tick:223]Dropped 40 ticks
[ns_server:debug,2012-11-13T10:03:15.415,ns_1@127.0.0.1:<0.4307.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:03:15.416,ns_1@127.0.0.1:<0.4307.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:03:15.416,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:03:15.416,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 5s
[ns_server:debug,2012-11-13T10:03:20.417,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:03:20.418,ns_1@127.0.0.1:<0.4336.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:03:20.419,ns_1@127.0.0.1:<0.4336.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:03:20.428,ns_1@127.0.0.1:<0.4339.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:03:20.428,ns_1@127.0.0.1:<0.4339.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:03:20.429,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:03:20.429,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:03:50.430,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:03:50.431,ns_1@127.0.0.1:<0.4469.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:03:50.432,ns_1@127.0.0.1:<0.4469.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:03:50.440,ns_1@127.0.0.1:<0.4472.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:03:50.440,ns_1@127.0.0.1:<0.4472.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:03:50.441,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:03:50.441,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:04:20.443,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:04:20.444,ns_1@127.0.0.1:<0.4605.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:04:20.445,ns_1@127.0.0.1:<0.4605.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:04:20.453,ns_1@127.0.0.1:<0.4608.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:04:20.453,ns_1@127.0.0.1:<0.4608.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:04:20.453,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:04:20.454,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:04:50.455,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:04:50.456,ns_1@127.0.0.1:<0.4759.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:04:50.457,ns_1@127.0.0.1:<0.4759.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:04:50.464,ns_1@127.0.0.1:<0.4762.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:04:50.464,ns_1@127.0.0.1:<0.4762.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:04:50.465,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:04:50.465,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:05:20.467,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:05:20.468,ns_1@127.0.0.1:<0.4895.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:05:20.469,ns_1@127.0.0.1:<0.4895.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:05:20.477,ns_1@127.0.0.1:<0.4898.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:05:20.477,ns_1@127.0.0.1:<0.4898.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:05:20.477,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:05:20.477,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:05:50.478,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:05:50.479,ns_1@127.0.0.1:<0.5028.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:05:50.481,ns_1@127.0.0.1:<0.5028.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:05:50.489,ns_1@127.0.0.1:<0.5031.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:05:50.489,ns_1@127.0.0.1:<0.5031.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:05:50.490,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:05:50.490,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:06:20.492,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:06:20.494,ns_1@127.0.0.1:<0.5164.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:06:20.495,ns_1@127.0.0.1:<0.5164.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:06:20.504,ns_1@127.0.0.1:<0.5167.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:06:20.504,ns_1@127.0.0.1:<0.5167.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:06:20.505,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:06:20.505,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:06:50.506,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:06:50.508,ns_1@127.0.0.1:<0.5297.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:06:50.509,ns_1@127.0.0.1:<0.5297.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:06:50.518,ns_1@127.0.0.1:<0.5300.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:06:50.518,ns_1@127.0.0.1:<0.5300.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:06:50.519,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:06:50.519,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:07:20.520,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:07:20.522,ns_1@127.0.0.1:<0.5433.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:07:20.524,ns_1@127.0.0.1:<0.5433.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:07:20.532,ns_1@127.0.0.1:<0.5436.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:07:20.532,ns_1@127.0.0.1:<0.5436.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:07:20.533,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:07:20.533,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:07:50.534,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:07:50.536,ns_1@127.0.0.1:<0.5587.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:07:50.537,ns_1@127.0.0.1:<0.5587.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:07:50.545,ns_1@127.0.0.1:<0.5590.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:07:50.545,ns_1@127.0.0.1:<0.5590.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:07:50.545,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:07:50.545,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:08:20.547,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:08:20.548,ns_1@127.0.0.1:<0.5723.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:08:20.549,ns_1@127.0.0.1:<0.5723.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:08:20.559,ns_1@127.0.0.1:<0.5726.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:08:20.559,ns_1@127.0.0.1:<0.5726.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:08:20.560,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:08:20.560,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:08:50.561,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:08:50.563,ns_1@127.0.0.1:<0.5856.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:08:50.565,ns_1@127.0.0.1:<0.5856.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:08:50.573,ns_1@127.0.0.1:<0.5859.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:08:50.573,ns_1@127.0.0.1:<0.5859.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:08:50.574,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:08:50.574,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:09:20.575,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:09:20.576,ns_1@127.0.0.1:<0.5992.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:09:20.578,ns_1@127.0.0.1:<0.5992.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:09:20.586,ns_1@127.0.0.1:<0.5995.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:09:20.586,ns_1@127.0.0.1:<0.5995.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:09:20.586,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:09:20.587,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:09:50.587,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:09:50.588,ns_1@127.0.0.1:<0.6125.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:09:50.589,ns_1@127.0.0.1:<0.6125.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:09:50.598,ns_1@127.0.0.1:<0.6128.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:09:50.598,ns_1@127.0.0.1:<0.6128.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:09:50.599,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:09:50.599,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:10:20.600,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:10:20.601,ns_1@127.0.0.1:<0.6261.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:10:20.602,ns_1@127.0.0.1:<0.6261.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:10:20.610,ns_1@127.0.0.1:<0.6264.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:10:20.610,ns_1@127.0.0.1:<0.6264.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:10:20.611,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:10:20.611,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:10:50.612,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:10:50.614,ns_1@127.0.0.1:<0.6419.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:10:50.616,ns_1@127.0.0.1:<0.6419.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:10:50.624,ns_1@127.0.0.1:<0.6422.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:10:50.624,ns_1@127.0.0.1:<0.6422.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:10:50.625,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:10:50.625,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:11:20.626,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:11:20.628,ns_1@127.0.0.1:<0.6555.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:11:20.629,ns_1@127.0.0.1:<0.6555.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:11:20.638,ns_1@127.0.0.1:<0.6558.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:11:20.638,ns_1@127.0.0.1:<0.6558.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:11:20.638,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:11:20.639,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:11:50.640,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:11:50.642,ns_1@127.0.0.1:<0.6688.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:11:50.643,ns_1@127.0.0.1:<0.6688.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:11:50.652,ns_1@127.0.0.1:<0.6691.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:11:50.652,ns_1@127.0.0.1:<0.6691.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:11:50.652,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:11:50.652,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:12:20.653,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:12:20.654,ns_1@127.0.0.1:<0.6824.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:12:20.655,ns_1@127.0.0.1:<0.6824.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:12:20.663,ns_1@127.0.0.1:<0.6827.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:12:20.663,ns_1@127.0.0.1:<0.6827.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:12:20.664,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:12:20.664,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:12:50.665,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:12:50.666,ns_1@127.0.0.1:<0.6957.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:12:50.667,ns_1@127.0.0.1:<0.6957.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:12:50.674,ns_1@127.0.0.1:<0.6960.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:12:50.674,ns_1@127.0.0.1:<0.6960.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:12:50.675,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:12:50.675,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:13:20.675,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:13:20.677,ns_1@127.0.0.1:<0.7093.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:13:20.679,ns_1@127.0.0.1:<0.7093.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:13:20.687,ns_1@127.0.0.1:<0.7096.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:13:20.687,ns_1@127.0.0.1:<0.7096.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:13:20.688,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:13:20.688,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:13:50.689,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:13:50.690,ns_1@127.0.0.1:<0.7247.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:13:50.691,ns_1@127.0.0.1:<0.7247.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:13:50.700,ns_1@127.0.0.1:<0.7250.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:13:50.700,ns_1@127.0.0.1:<0.7250.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:13:50.700,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:13:50.701,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:14:20.702,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:14:20.704,ns_1@127.0.0.1:<0.7383.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:14:20.706,ns_1@127.0.0.1:<0.7383.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:14:20.714,ns_1@127.0.0.1:<0.7386.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:14:20.714,ns_1@127.0.0.1:<0.7386.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:14:20.715,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:14:20.715,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:14:50.715,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:14:50.717,ns_1@127.0.0.1:<0.7516.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:14:50.718,ns_1@127.0.0.1:<0.7516.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:14:50.727,ns_1@127.0.0.1:<0.7519.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:14:50.727,ns_1@127.0.0.1:<0.7519.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:14:50.727,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:14:50.728,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:15:20.729,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:15:20.730,ns_1@127.0.0.1:<0.7652.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:15:20.732,ns_1@127.0.0.1:<0.7652.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:15:20.741,ns_1@127.0.0.1:<0.7655.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:15:20.741,ns_1@127.0.0.1:<0.7655.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:15:20.741,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:15:20.742,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:15:50.742,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:15:50.743,ns_1@127.0.0.1:<0.7785.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:15:50.745,ns_1@127.0.0.1:<0.7785.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:15:50.753,ns_1@127.0.0.1:<0.7788.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:15:50.753,ns_1@127.0.0.1:<0.7788.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:15:50.754,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:15:50.754,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:16:20.755,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:16:20.757,ns_1@127.0.0.1:<0.7921.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:16:20.759,ns_1@127.0.0.1:<0.7921.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:16:20.767,ns_1@127.0.0.1:<0.7924.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:16:20.767,ns_1@127.0.0.1:<0.7924.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:16:20.768,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:16:20.768,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:16:50.768,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:16:50.769,ns_1@127.0.0.1:<0.8079.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:16:50.770,ns_1@127.0.0.1:<0.8079.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:16:50.778,ns_1@127.0.0.1:<0.8082.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:16:50.779,ns_1@127.0.0.1:<0.8082.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:16:50.779,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:16:50.779,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:17:20.781,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:17:20.783,ns_1@127.0.0.1:<0.8215.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:17:20.784,ns_1@127.0.0.1:<0.8215.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:17:20.792,ns_1@127.0.0.1:<0.8218.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:17:20.792,ns_1@127.0.0.1:<0.8218.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:17:20.793,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:17:20.793,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:17:50.794,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:17:50.796,ns_1@127.0.0.1:<0.8348.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:17:50.798,ns_1@127.0.0.1:<0.8348.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:17:50.808,ns_1@127.0.0.1:<0.8351.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:17:50.808,ns_1@127.0.0.1:<0.8351.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:17:50.808,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:17:50.808,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:18:20.810,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:18:20.812,ns_1@127.0.0.1:<0.8484.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:18:20.813,ns_1@127.0.0.1:<0.8484.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:18:20.822,ns_1@127.0.0.1:<0.8487.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:18:20.823,ns_1@127.0.0.1:<0.8487.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:18:20.823,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:18:20.823,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[ns_server:debug,2012-11-13T10:18:50.825,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:18:50.827,ns_1@127.0.0.1:<0.8617.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:18:50.828,ns_1@127.0.0.1:<0.8617.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:18:50.837,ns_1@127.0.0.1:<0.8620.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541
[ns_server:debug,2012-11-13T10:18:50.837,ns_1@127.0.0.1:<0.8620.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:18:50.837,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:18:50.837,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon.
Next run will be in 30s [ns_server:debug,2012-11-13T10:19:20.838,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:19:20.840,ns_1@127.0.0.1:<0.8753.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:19:20.842,ns_1@127.0.0.1:<0.8753.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:19:20.850,ns_1@127.0.0.1:<0.8756.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:19:20.850,ns_1@127.0.0.1:<0.8756.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:19:20.850,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:19:20.851,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:19:50.852,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:19:50.854,ns_1@127.0.0.1:<0.8907.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:19:50.856,ns_1@127.0.0.1:<0.8907.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:19:50.864,ns_1@127.0.0.1:<0.8910.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:19:50.864,ns_1@127.0.0.1:<0.8910.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:19:50.864,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:19:50.865,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2012-11-13T10:20:20.866,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:20:20.866,ns_1@127.0.0.1:<0.9043.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:20:20.867,ns_1@127.0.0.1:<0.9043.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:20:20.875,ns_1@127.0.0.1:<0.9046.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:20:20.875,ns_1@127.0.0.1:<0.9046.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:20:20.876,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:20:20.876,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:20:50.877,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:20:50.879,ns_1@127.0.0.1:<0.9176.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:20:50.881,ns_1@127.0.0.1:<0.9176.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:20:50.890,ns_1@127.0.0.1:<0.9179.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:20:50.890,ns_1@127.0.0.1:<0.9179.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:20:50.890,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:20:50.891,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2012-11-13T10:21:20.893,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:21:20.896,ns_1@127.0.0.1:<0.9312.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:21:20.897,ns_1@127.0.0.1:<0.9312.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:21:20.906,ns_1@127.0.0.1:<0.9317.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:21:20.906,ns_1@127.0.0.1:<0.9317.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:21:20.907,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:21:20.907,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:21:50.909,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:21:50.910,ns_1@127.0.0.1:<0.9447.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:21:50.911,ns_1@127.0.0.1:<0.9447.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:21:50.920,ns_1@127.0.0.1:<0.9450.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:21:50.920,ns_1@127.0.0.1:<0.9450.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:21:50.920,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:21:50.921,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2012-11-13T10:22:20.922,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:22:20.923,ns_1@127.0.0.1:<0.9583.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:22:20.925,ns_1@127.0.0.1:<0.9583.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:22:20.933,ns_1@127.0.0.1:<0.9586.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 5667, file 1500541 [ns_server:debug,2012-11-13T10:22:20.934,ns_1@127.0.0.1:<0.9586.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:22:20.934,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:22:20.934,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:22:50.935,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:22:50.936,ns_1@127.0.0.1:<0.9741.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:22:50.937,ns_1@127.0.0.1:<0.9741.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:22:50.945,ns_1@127.0.0.1:<0.9744.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:22:50.946,ns_1@127.0.0.1:<0.9744.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:22:50.946,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:22:50.946,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
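For readers tracing the skip decision: a minimal Python sketch of the rule these file_needs_compaction lines imply (ns_server itself is Erlang; the names and the scaling factor are inferred from this log, not quoted from source — note that the compaction_daemon config sets {min_file_size,131072}, and 131072 * 65 files (64 vbuckets plus the master db) is exactly the 8519680 logged above).

    # Minimal sketch (illustrative Python, not ns_server's Erlang) of the skip
    # rule implied by the file_needs_compaction log lines above. The effective
    # minimum 8519680 equals min_file_size 131072 scaled by 65 files (64
    # vbuckets + master) -- an inference from this log, not a quoted source.
    def file_needs_compaction(data_size, file_size,
                              frag_threshold_percent=30,
                              min_file_size=131072, num_files=65):
        if file_size < min_file_size * num_files:
            return False  # "Estimated file size ... is less than min_file_size; skipping"
        fragmentation = (file_size - data_size) * 100.0 / file_size
        return fragmentation >= frag_threshold_percent

    # Both sizes seen in the log are far below the 8519680 threshold:
    assert not file_needs_compaction(6045, 1600430)
    assert not file_needs_compaction(5667, 1500541)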
[menelaus:info,2012-11-13T10:23:04.205,ns_1@127.0.0.1:<0.3950.0>:menelaus_web:loop:369]Invalid delete received: {mochiweb_request,#Port<0.12920>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"]
[... two further compaction iterations at 10:23:20 and 10:23:50, same pattern as above, both skipped ...]
[ns_server:debug,2012-11-13T10:24:08.825,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:24:08.826,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([rest,rest_creds]..)
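The 10:23:04 request above is rejected because the path "/pools/default/buckets/" carries no bucket name, so it parses to just ["pools","default","buckets"]. A hedged sketch of a well-formed delete against the same endpoint (host, port, and the Administrator:password Basic credentials match the captured request headers; treat this as an illustration, not a support script):

    # Sketch: a well-formed bucket delete names the bucket in the path.
    import base64
    import http.client

    auth = base64.b64encode(b"Administrator:password").decode()
    conn = http.client.HTTPConnection("localhost", 8091)
    conn.request("DELETE", "/pools/default/buckets/default",
                 headers={"Authorization": "Basic " + auth})
    resp = conn.getresponse()
    print(resp.status, resp.read())  # expect 200 on success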
[ns_server:info,2012-11-13T10:24:08.828,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_port_sup:terminate_port:129]unsupervising port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:debug,2012-11-13T10:24:08.829,ns_1@127.0.0.1:<0.3970.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:<0.3970.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:<0.3970.0>:ns_port_server:log:171]moxi<0.3970.0>: EOL on stdin. Exiting [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_port_sup:launch_port:74]supervising port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:debug,2012-11-13T10:24:08.831,ns_1@127.0.0.1:<0.10104.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [ns_server:debug,2012-11-13T10:24:08.834,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: rest -> [{port,8091}] [ns_server:debug,2012-11-13T10:24:08.834,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:info,2012-11-13T10:24:08.834,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [ns_server:debug,2012-11-13T10:24:08.835,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:24:08.835,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [error_logger:info,2012-11-13T10:24:08.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.10104.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", 
"port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:24:09.041,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:log:171]moxi<0.10105.0>: 2012-11-13 10:24:08: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.10105.0>: 2012-11-13 10:24:08: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8) [ns_server:debug,2012-11-13T10:24:20.977,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2012-11-13T10:24:20.978,ns_1@127.0.0.1:<0.10165.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:24:20.978,ns_1@127.0.0.1:<0.10165.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:24:20.987,ns_1@127.0.0.1:<0.10168.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430 [ns_server:debug,2012-11-13T10:24:20.987,ns_1@127.0.0.1:<0.10168.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:24:20.987,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:24:20.987,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
Next run will be in 30s
[... five further compaction iterations at 10:24:50 (rescheduled with "Next run will be in 29s"), 10:25:20, 10:25:50, 10:26:20 and 10:26:50, same pattern as above, all skipped ...]
[... at 10:26:53.260 (<0.3958.0>, #Port<0.13316>) and 10:26:58.156 (<0.10858.0>, #Port<0.13324>), menelaus_web logs two further "Invalid delete received" reports identical to the 10:23:04 entry ...]
[ns_server:debug,2012-11-13T10:27:20.051,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"default">>]
[ns_server:info,2012-11-13T10:27:20.053,ns_1@127.0.0.1:<0.11001.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default`
[ns_server:info,2012-11-13T10:27:20.054,ns_1@127.0.0.1:<0.11001.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]
[ns_server:debug,2012-11-13T10:27:20.063,ns_1@127.0.0.1:<0.11004.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `default`: data 6045, file 1600430
[ns_server:debug,2012-11-13T10:27:20.063,ns_1@127.0.0.1:<0.11004.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `default` is less than min_file_size 8519680; skipping
[ns_server:debug,2012-11-13T10:27:20.064,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:27:20.064,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon.
Next run will be in 30s [ns_server:debug,2012-11-13T10:27:34.331,ns_1@127.0.0.1:ns_bucket_worker<0.3977.0>:ns_bucket_sup:update_childs:91]Stopping child for dead bucket: {{per_bucket_sup,"default"}, <0.3986.0>,supervisor, [single_bucket_sup]} [ns_server:debug,2012-11-13T10:27:34.331,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running [ns_server:debug,2012-11-13T10:27:34.331,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:27:34.331,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[]}] [ns_server:debug,2012-11-13T10:27:34.331,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2012-11-13T10:27:34.332,ns_1@127.0.0.1:<0.3986.0>:single_bucket_sup:top_loop:28]Delegating exit {'EXIT',<0.3983.0>,shutdown} to child supervisor: <0.3987.0> [ns_server:debug,2012-11-13T10:27:34.332,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [ns_server:debug,2012-11-13T10:27:34.332,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:27:34.333,ns_1@127.0.0.1:<0.4034.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.4033.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:27:34.333,ns_1@127.0.0.1:capi_set_view_manager-default<0.3988.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:27:34.333,ns_1@127.0.0.1:<0.4032.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.4031.0>} exited with reason shutdown [user:info,2012-11-13T10:27:34.333,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 'ns_1@127.0.0.1' for deletion [ns_server:debug,2012-11-13T10:27:34.347,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:terminate:686]Proceeding into vbuckets dbs deletions [ns_server:info,2012-11-13T10:27:34.413,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/master">>: ok [ns_server:info,2012-11-13T10:27:34.415,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/9">>: ok [ns_server:info,2012-11-13T10:27:34.417,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/8">>: ok [ns_server:info,2012-11-13T10:27:34.419,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/7">>: ok [ns_server:info,2012-11-13T10:27:34.420,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/63">>: ok [ns_server:info,2012-11-13T10:27:34.422,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/62">>: ok [ns_server:info,2012-11-13T10:27:34.424,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/61">>: ok 
[... ns_storage_conf:delete_couch_database:432 continues through the remaining 58 vbucket databases, <<"default/60">>, <<"default/6">>, <<"default/59">>, ... <<"default/1">>, <<"default/0">> (timestamps 10:27:34.425 through 10:27:34.507, reverse-lexicographic order), each reported ": ok" ...]
[ns_server:info,2012-11-13T10:27:34.507,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_databases_and_files:475]Couch dbs are deleted. Proceeding with bucket directory
[ns_server:debug,2012-11-13T10:27:34.507,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:do_delete_bucket_indexes:496]indexes directory doesn't exist already. fine.
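A small sketch of the on-disk layout this deletion sequence walks: one Couch database per vbucket plus a master database. The names are read directly off the <<"default/N">> entries above; the helper name itself is illustrative.

    # Sketch: the per-vbucket Couch databases behind a 64-vbucket bucket,
    # matching the deletion sequence above (64 numbered dbs plus "master").
    def couch_databases(bucket="default", num_vbuckets=64):
        return ["%s/master" % bucket] + \
               ["%s/%d" % (bucket, v) for v in range(num_vbuckets)]

    dbs = couch_databases()
    assert len(dbs) == 65 and "default/master" in dbs and "default/63" in dbs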
[ns_server:debug,2012-11-13T10:27:34.508,ns_1@127.0.0.1:<0.3986.0>:single_bucket_sup:top_loop:24]per-bucket supervisor for "default" died with reason shutdown [ns_server:debug,2012-11-13T10:27:34.508,ns_1@127.0.0.1:<0.3989.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.3988.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:27:34.508,ns_1@127.0.0.1:<0.3992.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {mc_couch_events,<0.3988.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:27:34.508,ns_1@127.0.0.1:<0.3991.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.3988.0>} exited with reason shutdown [ns_server:info,2012-11-13T10:27:34.535,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:27:34.334628 PST 3: Shutting down tap connections! [ns_server:info,2012-11-13T10:27:34.739,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:idle:452]Restarting moxi on nodes ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T10:27:34.739,ns_1@127.0.0.1:<0.11079.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {buckets_events,<0.11078.0>} exited with reason normal [ns_server:info,2012-11-13T10:27:34.739,ns_1@127.0.0.1:<0.11154.0>:ns_port_sup:restart_port:134]restarting port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:debug,2012-11-13T10:27:34.740,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [ns_server:info,2012-11-13T10:27:34.741,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:27:34.741,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:log:171]moxi<0.10105.0>: EOL on stdin. 
Exiting
[ns_server:debug,2012-11-13T10:27:34.742,ns_1@127.0.0.1:<0.11155.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000
[menelaus:info,2012-11-13T10:27:34.743,ns_1@127.0.0.1:<0.10106.0>:menelaus_web_buckets:handle_bucket_delete:340]Deleted bucket "default"
[... error_logger PROGRESS REPORT at 10:27:34.746: ns_port_sup started the relaunched moxi child <0.11155.0> with the same arguments and environment as the 10:24:08 report above ...]
[ns_server:info,2012-11-13T10:27:34.951,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: 2012-11-13 10:27:34: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.11156.0>: 2012-11-13 10:27:34: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8)
[ns_server:debug,2012-11-13T10:27:50.065,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2012-11-13T10:27:50.066,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[... the same two-message "No buckets to compact" cycle repeats every 30s from 10:28:20 through 10:34:50 (14 iterations) ...]
[ns_server:debug,2012-11-13T10:35:17.205,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:35:17.205,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[[{map,[]}, {fastForwardMap,[]}, {uuid,<<"f8aa6b069a7884d9d87f6f421ae39f69">>}, {num_replicas,1}, {replica_index,true}, {auth_type,sasl}, {sasl_password,[]}, {ram_quota,104857600}, {type,membase}, {num_vbuckets,64}, {servers,[]}]]}]
[ns_server:debug,2012-11-13T10:35:17.205,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:info,2012-11-13T10:35:17.205,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:maybe_pass_token:89]Passed samples loading token to task: beer-sample
[ns_server:debug,2012-11-13T10:35:17.205,ns_1@127.0.0.1:<0.12892.0>:ns_janitor:cleanup_with_membase_bucket_check_servers:46]janitor decided to update servers list
[ns_server:debug,2012-11-13T10:35:17.206,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:35:17.206,ns_1@127.0.0.1:ns_bucket_worker<0.3977.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"beer-sample"}, {single_bucket_sup,start_link,["beer-sample"]}, permanent,infinity,supervisor, [single_bucket_sup]}
[ns_server:debug,2012-11-13T10:35:17.206,ns_1@127.0.0.1:<0.12897.0>:janitor_agent:new_style_query_vbucket_states_loop:116]Exception from query_vbucket_states of "beer-sample":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-beer-sample','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}}
[ns_server:debug,2012-11-13T10:35:17.207,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
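The config change above shows the new bucket's parameters: {ram_quota,104857600} bytes (100 MB), {num_replicas,1}, {auth_type,sasl}, 64 vbuckets. In this log the sample loader created beer-sample itself; as a point of reference only, a hedged sketch of creating an equivalent bucket over the REST interface (parameter names follow the 2.0 REST form and should be verified against the docs for your build):

    # Sketch: creating a bucket equivalent to the beer-sample config above via
    # the REST API. Host and credentials match this log; the parameter names
    # are assumed to follow the 2.0 REST form, not transcribed from it.
    import base64
    import http.client
    import urllib.parse

    body = urllib.parse.urlencode({
        "name": "beer-sample",
        "bucketType": "membase",   # {type,membase} in the config term
        "ramQuotaMB": 100,         # {ram_quota,104857600} bytes
        "replicaNumber": 1,        # {num_replicas,1}
        "authType": "sasl",        # {auth_type,sasl}
        "saslPassword": "",
    })
    auth = base64.b64encode(b"Administrator:password").decode()
    conn = http.client.HTTPConnection("localhost", 8091)
    conn.request("POST", "/pools/default/buckets", body=body,
                 headers={"Authorization": "Basic " + auth,
                          "Content-Type": "application/x-www-form-urlencoded"})
    print(conn.getresponse().status)  # 202 Accepted on success, if memory serves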
[ns_server:debug,2012-11-13T10:35:17.207,ns_1@127.0.0.1:<0.12897.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:121]Waiting for "beer-sample" on 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T10:35:17.207,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,ns_bucket_sup}
    started:    [{pid,<0.12898.0>}, {name,{per_bucket_sup,"beer-sample"}},
                 {mfargs,{single_bucket_sup,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:debug,2012-11-13T10:35:17.216,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"
[ns_server:debug,2012-11-13T10:35:17.232,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets ->
    [{configs,[{"beer-sample",
                [{map,[]},
                 {fastForwardMap,[]},
                 {uuid,<<"f8aa6b069a7884d9d87f6f421ae39f69">>},
                 {num_replicas,1},
                 {replica_index,true},
                 {auth_type,sasl},
                 {sasl_password,[]},
                 {ram_quota,104857600},
                 {type,membase},
                 {num_vbuckets,64},
                 {servers,['ns_1@127.0.0.1']}]}]}]
[ns_server:debug,2012-11-13T10:35:17.406,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "beer-sample":
    {'EXIT',{noproc,{gen_server,call,
                     [{'stats_reader-beer-sample','ns_1@127.0.0.1'},
                      {latest,minute}]}}}
[stats:error,2012-11-13T10:35:17.447,ns_1@127.0.0.1:<0.10101.0>:stats_reader:log_bad_responses:191]Some nodes didn't respond: ['ns_1@127.0.0.1']
[ns_server:info,2012-11-13T10:35:17.460,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: 2012-11-13 10:35:19: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of vBuckets must be a power of two > 0 and <= 65536 ({
moxi<0.11156.0>: "name": "beer-sample",
moxi<0.11156.0>: "nodeLocator": "vbucket",
moxi<0.11156.0>: "saslPassword": "",
moxi<0.11156.0>: "nodes": [{
moxi<0.11156.0>: "hostname": "127.0.0.1:8091",
moxi<0.11156.0>: "ports": {
moxi<0.11156.0>: "direct": 11210,
moxi<0.11156.0>: "proxy": 11211
moxi<0.11156.0>: }
moxi<0.11156.0>: }],
moxi<0.11156.0>: "vBucketServerMap": {
moxi<0.11156.0>: "hashAlgorithm": "CRC",
moxi<0.11156.0>: "numReplicas": 1,
moxi<0.11156.0>: "serverList": ["127.0.0.1:11210"],
moxi<0.11156.0>: "vBucketMap": []
moxi<0.11156.0>: }
moxi<0.11156.0>: })
[ns_server:debug,2012-11-13T10:35:17.649,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:init:218]Usable vbuckets: []
[ns_server:debug,2012-11-13T10:35:17.649,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[error_logger:info,2012-11-13T10:35:17.650,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12901.0>}, {name,{capi_set_view_manager,"beer-sample"}},
                 {mfargs,{capi_set_view_manager,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:error,2012-11-13T10:35:17.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
    Supervisor: {local,menelaus_sup}
    Context:    child_terminated
    Reason:     {noproc,{gen_server,call,
                         [{'stats_reader-beer-sample','ns_1@127.0.0.1'},
                          {latest,minute,1}]}}
    Offender:   [{pid,<0.3962.0>}, {name,menelaus_web_alerts_srv},
                 {mfargs,{menelaus_web_alerts_srv,start_link,[]}},
                 {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:17.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,menelaus_sup}
    started:    [{pid,<0.12927.0>}, {name,menelaus_web_alerts_srv},
                 {mfargs,{menelaus_web_alerts_srv,start_link,[]}},
                 {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:35:17.955,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:accept_loop:31]Got new connection
[ns_server:debug,2012-11-13T10:35:17.956,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.12928.0>
[ns_server:info,2012-11-13T10:35:17.992,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:ensure_bucket:1119]Created bucket "beer-sample" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=104857600;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/beer-sample;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=beer-sample;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/beer-sample/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;"
[error_logger:info,2012-11-13T10:35:17.995,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12922.0>}, {name,{ns_memcached,"beer-sample"}},
                 {mfargs,{ns_memcached,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:17.998,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12933.0>}, {name,{tap_replication_manager,"beer-sample"}},
                 {mfargs,{tap_replication_manager,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:17.999,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12934.0>}, {name,{ns_vbm_new_sup,"beer-sample"}},
                 {mfargs,{ns_vbm_new_sup,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:35:18.001,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12935.0>}, {name,{ns_vbm_sup,"beer-sample"}},
                 {mfargs,{ns_vbm_sup,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]
[ns_server:info,2012-11-13T10:35:18.016,ns_1@127.0.0.1:janitor_agent-beer-sample<0.12936.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config.
[ns_server:info,2012-11-13T10:35:18.016,ns_1@127.0.0.1:janitor_agent-beer-sample<0.12936.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config
[error_logger:info,2012-11-13T10:35:18.017,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12936.0>}, {name,{janitor_agent,"beer-sample"}},
                 {mfargs,{janitor_agent,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:35:18.018,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-minute'
[error_logger:info,2012-11-13T10:35:18.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12937.0>}, {name,{couch_stats_reader,"beer-sample"}},
                 {mfargs,{couch_stats_reader,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:18.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12938.0>}, {name,{stats_collector,"beer-sample"}},
                 {mfargs,{stats_collector,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:18.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12940.0>}, {name,{stats_archiver,"beer-sample"}},
                 {mfargs,{stats_archiver,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:18.022,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12942.0>}, {name,{stats_reader,"beer-sample"}},
                 {mfargs,{stats_reader,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:35:18.023,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
    supervisor: {local,'single_bucket_sup-beer-sample'}
    started:    [{pid,<0.12943.0>}, {name,{failover_safeness_level,"beer-sample"}},
                 {mfargs,{failover_safeness_level,start_link,["beer-sample"]}},
                 {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:debug,2012-11-13T10:35:18.085,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-minute',
            [{name,'stats_archiver-beer-sample-minute'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,18974},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9487,<0.12945.0>}}
[ns_server:debug,2012-11-13T10:35:18.086,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-minute',
            [{name,'stats_archiver-beer-sample-minute'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,18974},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9487,<0.12945.0>}}
[ns_server:debug,2012-11-13T10:35:18.087,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-hour'
[ns_server:info,2012-11-13T10:35:18.113,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]
memcached<0.3972.0>: Tue Nov 13 10:35:17.912626 PST 3: Trying to connect to mccouch: "localhost:11213"
memcached<0.3972.0>: Tue Nov 13 10:35:17.941540 PST 3: Connected to mccouch: "localhost:11213"
memcached<0.3972.0>: Tue Nov 13 10:35:17.991827 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear"
memcached<0.3972.0>: Tue Nov 13 10:35:17.992033 PST 3: Failed to load mutation log, falling back to key dump
memcached<0.3972.0>: Tue Nov 13 10:35:17.992232 PST 3: Extension support isn't implemented in this version of bucket_engine
memcached<0.3972.0>: Tue Nov 13 10:35:17.992439 PST 3: metadata loaded in 14 ms
memcached<0.3972.0>: Tue Nov 13 10:35:17.992989 PST 3: warmup completed in 14 ms
[ns_server:debug,2012-11-13T10:35:18.152,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[user:info,2012-11-13T10:35:18.154,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_cast:581]Bucket "beer-sample" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
[ns_server:debug,2012-11-13T10:35:18.196,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:current_status:161]Ignoring failure to get stats for bucket: "beer-sample":
    {error,{exit,{badarg,[{erlang,hd,[[]]},
                          {stats_reader,'-do_handle_call/3-fun-0-',2},
                          {mnesia_tm,non_transaction,5},
                          {stats_reader,do_handle_call,3},
                          {stats_reader,handle_call,3},
                          {gen_server,handle_msg,5},
                          {proc_lib,init_p_do_apply,3}]}}}
[ns_server:info,2012-11-13T10:35:18.209,ns_1@127.0.0.1:<0.12892.0>:ns_janitor:cleanup_with_membase_bucket_check_map:67]janitor decided to generate initial vbucket map
[ns_server:debug,2012-11-13T10:35:18.211,ns_1@127.0.0.1:<0.12892.0>:mb_map:generate_map:186]Natural map score: {64,0,0}
[ns_server:debug,2012-11-13T10:35:18.212,ns_1@127.0.0.1:<0.12892.0>:mb_map:generate_map:193]Rnd maps scores: {64,0,0}, {64,0,0}
[ns_server:debug,2012-11-13T10:35:18.213,ns_1@127.0.0.1:<0.12892.0>:mb_map:generate_map:207]Considering 1 maps: [{64,0,0}]
[ns_server:debug,2012-11-13T10:35:18.213,ns_1@127.0.0.1:<0.12892.0>:mb_map:generate_map:219]Best map score: {64,0,0} (true,true,true)
[ns_server:debug,2012-11-13T10:35:18.213,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:35:18.214,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets,vbucket_map_history]..)
[ns_server:debug,2012-11-13T10:35:18.216,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running
[ns_server:debug,2012-11-13T10:35:18.216,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:debug,2012-11-13T10:35:18.217,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: vbucket_map_history ->
    [{[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
       ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]],
      [{max_slaves,10}]}]
[ns_server:debug,2012-11-13T10:35:18.219,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets ->
    [{configs,[{"beer-sample",
                [{map,[{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]},
                       {4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]},
                       {8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]},
                       {12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]},
                       {16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]},
                       {20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]},
                       {24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]},
                       {28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]},
                       {32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]},
                       {36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]},
                       {40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]},
                       {44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]},
                       {48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]},
                       {52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]},
                       {56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]},
                       {60,[],['ns_1@127.0.0.1',undefined]}, {61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}]},
                 {fastForwardMap,[]},
                 {uuid,<<"f8aa6b069a7884d9d87f6f421ae39f69">>},
                 {num_replicas,1},
                 {replica_index,true},
                 {auth_type,sasl},
                 {sasl_password,[]},
                 {ram_quota,104857600},
                 {type,membase},
                 {num_vbuckets,64},
                 {servers,['ns_1@127.0.0.1']}]}]}]
[ns_server:debug,2012-11-13T10:35:18.216,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:info,2012-11-13T10:35:18.236,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active
[ns_server:debug,2012-11-13T10:35:18.237,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs
[ns_server:info,2012-11-13T10:35:18.238,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active
[ns_server:info,2012-11-13T10:35:18.239,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active
[ns_server:info,2012-11-13T10:35:18.240,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active
[ns_server:info,2012-11-13T10:35:18.241,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active
[ns_server:info,2012-11-13T10:35:18.241,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active
[ns_server:info,2012-11-13T10:35:18.242,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active
[ns_server:info,2012-11-13T10:35:18.243,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active
[ns_server:info,2012-11-13T10:35:18.244,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active
[ns_server:info,2012-11-13T10:35:18.244,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active
[ns_server:info,2012-11-13T10:35:18.245,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active
[ns_server:info,2012-11-13T10:35:18.245,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active
[ns_server:info,2012-11-13T10:35:18.328,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active
[ns_server:debug,2012-11-13T10:35:18.328,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-hour',
            [{name,'stats_archiver-beer-sample-hour'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,110485},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9488,<0.12952.0>}}
[ns_server:info,2012-11-13T10:35:18.329,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active
[ns_server:debug,2012-11-13T10:35:18.329,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-hour',
            [{name,'stats_archiver-beer-sample-hour'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,110485},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9488,<0.12952.0>}}
[ns_server:info,2012-11-13T10:35:18.330,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active
[ns_server:debug,2012-11-13T10:35:18.330,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-day'
[ns_server:info,2012-11-13T10:35:18.331,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active
[ns_server:info,2012-11-13T10:35:18.331,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active
[ns_server:info,2012-11-13T10:35:18.332,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active
[ns_server:info,2012-11-13T10:35:18.332,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active
[ns_server:info,2012-11-13T10:35:18.333,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active
[ns_server:info,2012-11-13T10:35:18.333,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active
[ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active
[ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active
[ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active
[ns_server:info,2012-11-13T10:35:18.388,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active
[ns_server:info,2012-11-13T10:35:18.389,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active
[ns_server:info,2012-11-13T10:35:18.390,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active
[ns_server:info,2012-11-13T10:35:18.392,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active
[ns_server:info,2012-11-13T10:35:18.393,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active
[ns_server:info,2012-11-13T10:35:18.394,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active
[ns_server:info,2012-11-13T10:35:18.395,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active
[ns_server:info,2012-11-13T10:35:18.396,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active
[ns_server:info,2012-11-13T10:35:18.396,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active
[ns_server:info,2012-11-13T10:35:18.397,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active
[ns_server:info,2012-11-13T10:35:18.398,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active
[ns_server:info,2012-11-13T10:35:18.398,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active
[ns_server:info,2012-11-13T10:35:18.399,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active
[ns_server:info,2012-11-13T10:35:18.475,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active
[ns_server:info,2012-11-13T10:35:18.476,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active
[ns_server:info,2012-11-13T10:35:18.477,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active
[ns_server:info,2012-11-13T10:35:18.478,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active
[ns_server:info,2012-11-13T10:35:18.479,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active
[ns_server:info,2012-11-13T10:35:18.480,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active
[ns_server:info,2012-11-13T10:35:18.480,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active
[ns_server:info,2012-11-13T10:35:18.481,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active
[ns_server:info,2012-11-13T10:35:18.481,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active
[ns_server:info,2012-11-13T10:35:18.482,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active
[ns_server:info,2012-11-13T10:35:18.482,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active
[ns_server:info,2012-11-13T10:35:18.483,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active
[ns_server:info,2012-11-13T10:35:18.484,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active
[ns_server:info,2012-11-13T10:35:18.484,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active
[ns_server:info,2012-11-13T10:35:18.485,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active
[ns_server:info,2012-11-13T10:35:18.485,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active
[ns_server:info,2012-11-13T10:35:18.486,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active
[ns_server:info,2012-11-13T10:35:18.486,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active
[ns_server:info,2012-11-13T10:35:18.487,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active
[ns_server:info,2012-11-13T10:35:18.487,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active
[ns_server:info,2012-11-13T10:35:18.488,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active
[ns_server:info,2012-11-13T10:35:18.488,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active
[ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active
[ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active
[ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active
[ns_server:info,2012-11-13T10:35:18.490,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active
[ns_server:info,2012-11-13T10:35:18.490,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active
[ns_server:info,2012-11-13T10:35:18.491,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "beer-sample"
[ns_server:info,2012-11-13T10:35:18.491,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_call:248]Bucket "beer-sample" marked as warmed in 0 seconds
[ns_server:debug,2012-11-13T10:35:18.495,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-day',
            [{name,'stats_archiver-beer-sample-day'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,331053},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9489,<0.12974.0>}}
[ns_server:debug,2012-11-13T10:35:18.496,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-day',
            [{name,'stats_archiver-beer-sample-day'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,331053},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9489,<0.12974.0>}}
[ns_server:debug,2012-11-13T10:35:18.497,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-week'
[views:debug,2012-11-13T10:35:18.681,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/63. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:18.796,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?"
[ns_server:debug,2012-11-13T10:35:18.796,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",63,active,0}
[ns_server:debug,2012-11-13T10:35:18.965,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-week',
            [{name,'stats_archiver-beer-sample-week'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,497578},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9490,<0.12989.0>}}
[ns_server:debug,2012-11-13T10:35:18.966,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-week',
            [{name,'stats_archiver-beer-sample-week'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,497578},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9490,<0.12989.0>}}
[ns_server:debug,2012-11-13T10:35:18.967,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-month'
[views:debug,2012-11-13T10:35:19.077,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/62. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.103,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?>"
[ns_server:debug,2012-11-13T10:35:19.103,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",62,active,0}
[ns_server:debug,2012-11-13T10:35:19.223,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-month',
            [{name,'stats_archiver-beer-sample-month'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,967872},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9493,<0.13002.0>}}
[ns_server:debug,2012-11-13T10:35:19.224,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-month',
            [{name,'stats_archiver-beer-sample-month'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831718,967872},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9493,<0.13002.0>}}
[ns_server:debug,2012-11-13T10:35:19.224,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_call:109]Creating table 'stats_archiver-beer-sample-year'
[views:debug,2012-11-13T10:35:19.346,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/61. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.346,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",61,active,0}
[ns_server:debug,2012-11-13T10:35:19.347,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "?>="
[ns_server:debug,2012-11-13T10:35:19.488,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-year',
            [{name,'stats_archiver-beer-sample-year'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831719,225104},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9494,<0.13015.0>}}
[ns_server:debug,2012-11-13T10:35:19.489,ns_1@127.0.0.1:mb_mnesia<0.3759.0>:mb_mnesia:handle_info:222]Mnesia table event:
    {write,{schema,'stats_archiver-beer-sample-year',
            [{name,'stats_archiver-beer-sample-year'}, {type,ordered_set}, {ram_copies,[]},
             {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0},
             {access_mode,read_write}, {majority,false}, {index,[]}, {snmp,[]},
             {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]},
             {user_properties,[]}, {frag_properties,[]},
             {cookie,{{1352,831719,225104},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]},
     {tid,9494,<0.13015.0>}}
[views:debug,2012-11-13T10:35:19.592,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/60. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.593,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",60,active,0}
[ns_server:debug,2012-11-13T10:35:19.593,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "="
[views:debug,2012-11-13T10:35:19.694,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/59. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.695,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",59,active,0}
[ns_server:debug,2012-11-13T10:35:19.695,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "="
[views:debug,2012-11-13T10:35:19.828,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/58. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.828,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: ":="
[ns_server:debug,2012-11-13T10:35:19.828,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",58,active,0}
[views:debug,2012-11-13T10:35:19.929,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/57.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:19.930,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",57,active,0}
[ns_server:debug,2012-11-13T10:35:19.930,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "9:="
[views:debug,2012-11-13T10:35:20.029,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/56. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.030,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "9:="
[ns_server:debug,2012-11-13T10:35:20.030,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",56,active,0}
[ns_server:debug,2012-11-13T10:35:20.095,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"beer-sample">>]
[ns_server:info,2012-11-13T10:35:20.097,ns_1@127.0.0.1:<0.13060.0>:compaction_daemon:check_all_dbs_exist:1266]Skipping compaction of bucket `beer-sample` since at least database `beer-sample/0` seems to be missing.
[ns_server:debug,2012-11-13T10:35:20.097,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration.
[ns_server:debug,2012-11-13T10:35:20.098,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s
[views:debug,2012-11-13T10:35:20.142,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/55. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.142,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "97:="
[ns_server:debug,2012-11-13T10:35:20.142,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",55,active,0}
[views:debug,2012-11-13T10:35:20.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/54. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.303,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "697:="
[ns_server:debug,2012-11-13T10:35:20.303,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",54,active,0}
[views:debug,2012-11-13T10:35:20.466,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/53. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.467,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",53,active,0}
[ns_server:debug,2012-11-13T10:35:20.467,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "697:="
[views:debug,2012-11-13T10:35:20.607,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/52.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.608,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",52,active,0}
[ns_server:debug,2012-11-13T10:35:20.608,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "6947:="
[views:debug,2012-11-13T10:35:20.764,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/51. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.764,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",51,active,0}
[ns_server:debug,2012-11-13T10:35:20.764,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "36947:="
[views:debug,2012-11-13T10:35:20.963,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/50. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:20.963,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "36947:="
[ns_server:debug,2012-11-13T10:35:20.963,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",50,active,0}
[views:debug,2012-11-13T10:35:21.134,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/49. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.135,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",49,active,0}
[ns_server:debug,2012-11-13T10:35:21.135,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "369147:="
[views:debug,2012-11-13T10:35:21.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/48. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.304,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369147:="
[ns_server:debug,2012-11-13T10:35:21.304,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",48,active,0}
[views:debug,2012-11-13T10:35:21.518,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/47. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.518,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369147:="
[ns_server:debug,2012-11-13T10:35:21.519,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",47,active,0}
[views:debug,2012-11-13T10:35:21.697,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/46.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.698,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",46,active,0}
[ns_server:debug,2012-11-13T10:35:21.698,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369.147:="
[views:debug,2012-11-13T10:35:21.832,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/45. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.833,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369.147:=-"
[ns_server:debug,2012-11-13T10:35:21.833,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",45,active,0}
[views:debug,2012-11-13T10:35:21.989,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/44. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:21.989,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",44,active,0}
[ns_server:debug,2012-11-13T10:35:21.989,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;>.147:=-"
[ns_server:info,2012-11-13T10:35:22.002,ns_1@127.0.0.1:ns_doctor<0.3916.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["beer-sample"]
[views:debug,2012-11-13T10:35:22.241,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/43. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.242,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",43,active,0}
[ns_server:debug,2012-11-13T10:35:22.242,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;+>.147:=-"
[views:debug,2012-11-13T10:35:22.401,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/42. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.401,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",42,active,0}
[ns_server:debug,2012-11-13T10:35:22.401,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369<,?/258;+>.147:*=-"
[views:debug,2012-11-13T10:35:22.525,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/41. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.525,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",41,active,0}
[ns_server:debug,2012-11-13T10:35:22.526,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258;+>.147:*=-"
[views:debug,2012-11-13T10:35:22.652,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/40.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.653,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",40,active,0}
[ns_server:debug,2012-11-13T10:35:22.653,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258(;+>.147:*=-"
[views:debug,2012-11-13T10:35:22.760,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/39. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.760,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",39,active,0}
[ns_server:debug,2012-11-13T10:35:22.760,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0369)<,?/258(;+>.147':*=-"
[views:debug,2012-11-13T10:35:22.871,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/38. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:22.871,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",38,active,0}
[ns_server:debug,2012-11-13T10:35:22.872,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/258(;+>.147':*=-"
[views:debug,2012-11-13T10:35:23.018,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/37. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:23.018,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 0 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.018,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",37,active,0}
[ns_server:debug,2012-11-13T10:35:23.019,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/25%8(;+>.147':*=-"
[ns_server:debug,2012-11-13T10:35:23.020,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 1 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.021,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 2 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.131,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 3 ({not_found,no_db_file}). Ignoring
[views:debug,2012-11-13T10:35:23.131,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/36.
Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:23.132,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",36,active,0}
[ns_server:debug,2012-11-13T10:35:23.132,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "036&9)<,?/25%8(;+>.14$7':*=-"
[ns_server:debug,2012-11-13T10:35:23.132,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 4 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.134,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 5 ({not_found,no_db_file}). Ignoring
[views:debug,2012-11-13T10:35:23.239,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/35. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:23.240,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 6 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.240,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",35,active,0}
[ns_server:debug,2012-11-13T10:35:23.240,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/25%8(;+>.14$7':*=-"
[ns_server:debug,2012-11-13T10:35:23.241,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 7 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.242,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 8 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.243,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 9 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.390,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 10 ({not_found,no_db_file}). Ignoring
[views:debug,2012-11-13T10:35:23.391,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/34. Updated state: active (0)
[ns_server:debug,2012-11-13T10:35:23.392,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",34,active,0}
[ns_server:debug,2012-11-13T10:35:23.392,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 11 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.392,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/2\"5%8(;+>.14$7':*=-"
[ns_server:debug,2012-11-13T10:35:23.393,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 12 ({not_found,no_db_file}).
Ignoring
[ns_server:debug,2012-11-13T10:35:23.394,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 13 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.395,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 14 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.396,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 15 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.397,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 16 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.397,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 17 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.398,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 18 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.399,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 19 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.399,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 20 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.400,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 21 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.401,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 22 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.401,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 23 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.402,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 24 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.402,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 25 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.403,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 26 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.403,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 27 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.404,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 28 ({not_found,no_db_file}). Ignoring
[ns_server:debug,2012-11-13T10:35:23.404,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 29 ({not_found,no_db_file}).
Ignoring [ns_server:debug,2012-11-13T10:35:23.405,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 30 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:23.405,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 31 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:23.406,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 32 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:35:23.490,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/33. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:23.491,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",33,active,0} [ns_server:debug,2012-11-13T10:35:23.491,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "03#6&9)<,?/2\"5%8(;+>.1!4$7':*=-" [views:debug,2012-11-13T10:35:23.685,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/32. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:23.686,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",32,active,0} [ns_server:debug,2012-11-13T10:35:23.686,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: "0 3#6&9)<,?/2\"5%8(;+>.1!4$7':*=-" [views:debug,2012-11-13T10:35:23.801,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/31. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:23.802,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",31,active,0} [ns_server:debug,2012-11-13T10:35:23.802,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,49,33, 52,36,55,39,58,42,61,45] [views:debug,2012-11-13T10:35:23.949,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/30. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:23.950,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",30,active,0} [ns_server:debug,2012-11-13T10:35:23.950,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,30,49, 33,52,36,55,39,58,42,61,45] [views:debug,2012-11-13T10:35:24.099,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/29. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:24.099,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",29,active,0} [ns_server:debug,2012-11-13T10:35:24.100,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,63,47,31,50,34,53,37,56,40,59,43,62,46,30,49, 33,52,36,55,39,58,42,61,45,29] [views:debug,2012-11-13T10:35:24.309,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/28. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:24.310,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,62,46,30, 49,33,52,36,55,39,58,42,61,45,29] [ns_server:debug,2012-11-13T10:35:24.310,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",28,active,0} [views:debug,2012-11-13T10:35:24.496,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/27. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:24.496,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62,46, 30,49,33,52,36,55,39,58,42,61,45,29] [ns_server:debug,2012-11-13T10:35:24.497,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",27,active,0} [views:debug,2012-11-13T10:35:24.666,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/26. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:24.667,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62,46, 30,49,33,52,36,55,39,58,42,26,61,45,29] [ns_server:debug,2012-11-13T10:35:24.667,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",26,active,0} [views:debug,2012-11-13T10:35:24.849,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/25. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:24.850,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",25,active,0} [ns_server:debug,2012-11-13T10:35:24.849,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,59,43,27,62, 46,30,49,33,52,36,55,39,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.010,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/24. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.011,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",24,active,0} [ns_server:debug,2012-11-13T10:35:25.011,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43,27, 62,46,30,49,33,52,36,55,39,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.167,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/23. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.168,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",23,active,0} [ns_server:debug,2012-11-13T10:35:25.168,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43,27, 62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.312,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/22. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.312,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",22,active,0} [ns_server:debug,2012-11-13T10:35:25.312,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,56,40,24,59,43, 27,62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.437,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/21. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.440,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",21,active,0} [ns_server:debug,2012-11-13T10:35:25.440,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24,59, 43,27,62,46,30,49,33,52,36,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.591,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/20. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.592,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",20,active,0} [ns_server:debug,2012-11-13T10:35:25.592,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24,59, 43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.751,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/19. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.752,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",19,active,0} [ns_server:debug,2012-11-13T10:35:25.752,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,53,37,21,56,40,24, 59,43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:25.873,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/18. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:25.874,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",18,active,0} [ns_server:debug,2012-11-13T10:35:25.874,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56,40, 24,59,43,27,62,46,30,49,33,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:26.055,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/17. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:26.056,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",17,active,0} [ns_server:debug,2012-11-13T10:35:26.056,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56,40, 24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:26.245,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/16. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:26.246,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",16,active,0} [ns_server:debug,2012-11-13T10:35:26.246,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,50,34,18,53,37,21,56, 40,24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:26.548,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/15. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:26.549,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",15,active,0} [ns_server:debug,2012-11-13T10:35:26.549,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:26.788,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/14. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:26.789,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",14,active,0} [ns_server:debug,2012-11-13T10:35:26.789,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29] [views:debug,2012-11-13T10:35:27.168,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/13. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:27.168,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",13,active,0} [ns_server:debug,2012-11-13T10:35:27.168,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,63,47,31,15,50,34,18,53,37,21, 56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45,29, 13] [views:debug,2012-11-13T10:35:27.825,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/12. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:27.826,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",12,active,0} [ns_server:debug,2012-11-13T10:35:27.826,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61,45, 29,13] [ns_server:debug,2012-11-13T10:35:28.302,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 0 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:35:28.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/11. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:28.303,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",11,active,0} [ns_server:debug,2012-11-13T10:35:28.304,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,61, 45,29,13] [ns_server:debug,2012-11-13T10:35:28.304,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 1 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.306,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 2 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.307,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 3 ({not_found,no_db_file}). 
Ignoring [ns_server:debug,2012-11-13T10:35:28.324,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 4 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.325,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 5 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.326,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 6 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.327,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 7 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.328,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 8 ({not_found,no_db_file}). Ignoring [ns_server:debug,2012-11-13T10:35:28.329,ns_1@127.0.0.1:couch_stats_reader-beer-sample<0.12937.0>:couch_stats_reader:vbuckets_aggregation_loop:126]Failed to open vbucket: 9 ({not_found,no_db_file}). Ignoring [views:debug,2012-11-13T10:35:28.645,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/10. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:28.646,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",10,active,0} [ns_server:debug,2012-11-13T10:35:28.646,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:35:29.263,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/9. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:29.264,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",9,active,0} [ns_server:debug,2012-11-13T10:35:29.264,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:35:29.867,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/8. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:29.869,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",8,active,0} [ns_server:debug,2012-11-13T10:35:29.869,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,55,39,23,58,42,26,10, 61,45,29,13] [views:debug,2012-11-13T10:35:30.247,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/7. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:30.248,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",7,active,0} [ns_server:debug,2012-11-13T10:35:30.248,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53,37, 21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58,42,26, 10,61,45,29,13] [views:debug,2012-11-13T10:35:30.521,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/6. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:30.521,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",6,active,0} [ns_server:debug,2012-11-13T10:35:30.521,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58,42, 26,10,61,45,29,13] [views:debug,2012-11-13T10:35:30.806,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/5. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:30.806,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",5,active,0} [ns_server:debug,2012-11-13T10:35:30.806,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,36,20,7,55,39,23,58, 42,26,10,61,45,29,13] [views:debug,2012-11-13T10:35:31.142,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/4. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:31.143,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",4,active,0} [ns_server:debug,2012-11-13T10:35:31.142,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18,53, 5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23,58, 42,26,10,61,45,29,13] [views:debug,2012-11-13T10:35:31.637,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/3. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:31.638,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",3,active,0} [ns_server:debug,2012-11-13T10:35:31.638,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23, 58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:35:32.145,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/2. 
Updated state: active (0) [ns_server:debug,2012-11-13T10:35:32.146,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",2,active,0} [ns_server:debug,2012-11-13T10:35:32.146,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,52,4,36,20,7,55,39,23, 58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:35:32.821,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/1. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:32.822,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",1,active,0} [ns_server:debug,2012-11-13T10:35:32.822,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2,18, 53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55,39, 23,58,42,26,10,61,45,29,13] [views:debug,2012-11-13T10:35:33.101,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/0. Updated state: active (0) [ns_server:debug,2012-11-13T10:35:33.102,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:377]Usable vbuckets: [48,32,16,0,51,35,3,19,6,54,38,22,9,57,41,25,60,44,28,12,63,47,31,15,50,34,2, 18,53,5,37,21,8,56,40,24,59,43,27,11,62,46,30,14,49,33,17,1,52,4,36,20,7,55, 39,23,58,42,26,10,61,45,29,13] [ns_server:debug,2012-11-13T10:35:33.102,ns_1@127.0.0.1:<0.12928.0>:mc_connection:do_notify_vbucket_update:112]Signaled mc_couch_event: {set_vbucket,"beer-sample",0,active,0} [ns_server:debug,2012-11-13T10:35:33.436,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:35:33,436] - [rest_client] [140735135437184] - INFO - existing buckets : [u'beer-sample']\n" [ns_server:debug,2012-11-13T10:35:33.437,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:35:33,436] - [rest_client] [140735135437184] - INFO - found bucket beer-sample\n" [ns_server:debug,2012-11-13T10:35:50.099,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"beer-sample">>] [ns_server:info,2012-11-13T10:35:50.100,ns_1@127.0.0.1:<0.13657.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `beer-sample` [ns_server:info,2012-11-13T10:35:50.101,ns_1@127.0.0.1:<0.13657.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket beer-sample with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:35:50.109,ns_1@127.0.0.1:<0.13660.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `beer-sample`: data 5667, file 502141 [ns_server:debug,2012-11-13T10:35:50.109,ns_1@127.0.0.1:<0.13660.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `beer-sample` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:35:50.109,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. 
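
Note on the "Usable vbuckets" values above: they flip between a quoted string and a plain integer list. That is an artifact of Erlang's ~p formatting, which prints a list of integers as a string whenever every element is a printable character code (vbucket 48 renders as "0", 35 as "#", and so on) and falls back to the numeric form as soon as a code below 32 joins the list — which is why the switch happens exactly when vbucket 31 becomes active. A minimal Python sketch of the heuristic (simplified to the ASCII range; Erlang's io_lib:printable_list/1 is slightly more permissive):

    def render_like_erlang(ids):
        # ~p prints an integer list as a string when every element is a
        # printable character code; otherwise it prints the raw list.
        if ids and all(32 <= n <= 126 for n in ids):
            return '"' + "".join(chr(n) for n in ids) + '"'
        return "[" + ",".join(str(n) for n in ids) + "]"

    print(render_like_erlang([48, 51, 35, 54]))      # -> "03#6"
    print(render_like_erlang([48, 51, 35, 54, 31]))  # -> [48,51,35,54,31]
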
[ns_server:debug,2012-11-13T10:35:50.110,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:36:05.663,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_call:254]Writing interactively saved ddoc {doc,<<"_design/beer">>, {0,<<>>}, <<"{\"language\":\"javascript\",\"views\":{\"brewery_beers\":{\"map\":\"function(doc, meta) {\\n switch(doc.type) {\\n case \\\"brewery\\\":\\n emit([meta.id]);\\n break;\\n case \\\"beer\\\":\\n if (doc.brewery_id) {\\n emit([doc.brewery_id, meta.id]);\\n }\\n break;\\n }\\n}\\n\"},\"by_location\":{\"map\":\"function (doc, meta) {\\n if (doc.country, doc.state, doc.city) {\\n emit([doc.country, doc.state, doc.city], 1);\\n } else if (doc.country, doc.state) {\\n emit([doc.country, doc.state], 1);\\n } else if (doc.country) {\\n emit([doc.country], 1);\\n }\\n}\",\"reduce\":\"_count\"}}}">>, 0,false,[]} [ns_server:info,2012-11-13T10:36:06.153,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:353]Processing update_ddoc _design/beer (false) [views:debug,2012-11-13T10:36:06.189,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:maybe_define_group:419] Calling couch_set_view:define_group([<<"beer-sample">>,<<"_design/beer">>, {set_view_params,64,[],[],true}]) [views:debug,2012-11-13T10:36:07.160,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:maybe_define_group:419] couch_set_view:define_group([<<"beer-sample">>,<<"_design/beer">>, {set_view_params,64,[],[],true}]) returned ok in 970ms [views:debug,2012-11-13T10:36:07.173,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:465] Calling couch_set_view:mark_partitions_indexable([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11, 12,13,14,15,16,17,18,19,20, 21,22,23,24,25,26,27,28,29, 30,31,32,33,34,35,36,37,38, 39,40,41,42,43,44,45,46,47, 48,49,50,51,52,53,54,55,56, 57,58,59,60,61,62,63]]) [views:debug,2012-11-13T10:36:07.174,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:466] couch_set_view:mark_partitions_indexable([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14, 15,16,17,18,19,20,21,22,23,24,25, 26,27,28,29,30,31,32,33,34,35,36, 37,38,39,40,41,42,43,44,45,46,47, 48,49,50,51,52,53,54,55,56,57,58, 59,60,61,62,63]]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.174,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:471] Calling couch_set_view:set_partition_states([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13, 14,15,16,17,18,19,20,21,22,23, 24,25,26,27,28,29,30,31,32,33, 34,35,36,37,38,39,40,41,42,43, 44,45,46,47,48,49,50,51,52,53, 54,55,56,57,58,59,60,61,62,63], [],[]]) [views:debug,2012-11-13T10:36:07.276,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:472] couch_set_view:set_partition_states([<<"beer-sample">>,<<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, 16,17,18,19,20,21,22,23,24,25,26,27,28, 29,30,31,32,33,34,35,36,37,38,39,40,41, 42,43,44,45,46,47,48,49,50,51,52,53,54, 55,56,57,58,59,60,61,62,63], [],[]]) returned ok in 101ms 
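
The capi_set_view_manager entries above, together with the replica-partition calls that follow, show the fixed order in which ns_server wires a design document into the set-view engine: define the group over all 64 vbuckets, mark the partitions indexable, push the active/passive/cleanup partition states, then reconcile the (here empty) replica and unindexable partition sets. The sketch below only mirrors that call order with stubs; the function names follow the couch_set_view calls in the log, but the Python itself is purely illustrative:

    NUM_VBUCKETS = 64  # matches {set_view_params,64,...} in the log

    def couch_set_view(call, *args):
        # Stub standing in for the real Erlang view-engine call.
        print("couch_set_view:%s(%r)" % (call, args))
        return "ok"

    def apply_index_states(bucket, ddoc, active, passive, cleanup):
        # Call order mirrored from the log entries above and below.
        couch_set_view("define_group", bucket, ddoc, NUM_VBUCKETS)
        couch_set_view("mark_partitions_indexable", bucket, ddoc, active + passive)
        couch_set_view("set_partition_states", bucket, ddoc, active, passive, cleanup)
        couch_set_view("add_replica_partitions", bucket, ddoc, [])
        couch_set_view("remove_replica_partitions", bucket, ddoc, [])
        couch_set_view("mark_partitions_unindexable", bucket, ddoc, [])

    apply_index_states("beer-sample", "_design/beer",
                       active=list(range(NUM_VBUCKETS)), passive=[], cleanup=[])
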
[views:debug,2012-11-13T10:36:07.277,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:476] Calling couch_set_view:add_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.277,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:477] couch_set_view:add_replica_partitions([<<"beer-sample">>,<<"_design/beer">>, []]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:478] Calling couch_set_view:remove_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:479] couch_set_view:remove_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:488] Calling couch_set_view:mark_partitions_unindexable([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.279,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:489] couch_set_view:mark_partitions_unindexable([<<"beer-sample">>, <<"_design/beer">>,[]]) returned ok in 0ms [menelaus:warn,2012-11-13T10:36:10.435,ns_1@127.0.0.1:<0.13447.0>:menelaus_web:loop:342]Client-side error-report for user "Administrator" on node 'ns_1@127.0.0.1': User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11 Got unhandled error: Uncaught TypeError: Cannot call method 'concat' of undefined At: http://127.0.0.1:8091/js/analytics.js:186 Backtrace: Function: collectBacktraceViaCaller Args: --------- Function: appOnError Args: "Uncaught TypeError: Cannot call method 'concat' of undefined" "http://127.0.0.1:8091/js/analytics.js" 186 --------- [ns_server:debug,2012-11-13T10:36:20.110,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:269]Starting compaction for the following buckets: [<<"beer-sample">>] [ns_server:info,2012-11-13T10:36:20.112,ns_1@127.0.0.1:<0.13872.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `beer-sample` [ns_server:info,2012-11-13T10:36:20.221,ns_1@127.0.0.1:<0.13872.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket beer-sample with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2012-11-13T10:36:20.229,ns_1@127.0.0.1:<0.13875.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `beer-sample`: data 242933, file 2485226 [ns_server:debug,2012-11-13T10:36:20.229,ns_1@127.0.0.1:<0.13875.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `beer-sample` is less than min_file_size 8519680; skipping [ns_server:debug,2012-11-13T10:36:20.229,ns_1@127.0.0.1:<0.13877.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `beer-sample/_design/beer/main`: data 0, file 8516 [ns_server:debug,2012-11-13T10:36:20.230,ns_1@127.0.0.1:<0.13877.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `beer-sample/_design/beer/main` is less than min_file_size 131072; skipping 
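
Both compaction passes above bail out for the same reason: the estimated file size never reaches min_file_size (8519680 bytes for the bucket data files, 131072 for the view index files), so the 30% fragmentation thresholds from the bucket's autocompaction config are never even consulted. A hedged sketch of that decision, assuming fragmentation is measured as dead space over total file size:

    def file_needs_compaction(data_size, file_size,
                              frag_threshold_pct=30, min_file_size=131072):
        # Files below min_file_size are never compacted, regardless of
        # fragmentation ("... is less than min_file_size ...; skipping").
        if file_size < min_file_size:
            return False
        dead_pct = (file_size - data_size) * 100.0 / file_size
        return dead_pct >= frag_threshold_pct

    # Figures from the 10:35:50 pass: data 5667 bytes, file 502141 bytes.
    print(file_needs_compaction(5667, 502141, min_file_size=8519680))  # False
    # Without the size floor the same file would be ~99% fragmented:
    print(file_needs_compaction(5667, 502141, min_file_size=0))        # True
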
[ns_server:debug,2012-11-13T10:36:20.230,ns_1@127.0.0.1:<0.13878.0>:compaction_daemon:file_needs_compaction:831]Estimated size for `beer-sample/_design/beer/replica`: data 0, file 4165 [ns_server:debug,2012-11-13T10:36:20.230,ns_1@127.0.0.1:<0.13878.0>:compaction_daemon:file_needs_compaction:836]Estimated file size for `beer-sample/_design/beer/replica` is less than min_file_size 131072; skipping [ns_server:debug,2012-11-13T10:36:20.230,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:309]Finished compaction iteration. [ns_server:debug,2012-11-13T10:36:20.231,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:36:43.362,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:353]Got config_changed in state idle. Nothing to do since compaction is not running [ns_server:debug,2012-11-13T10:36:43.362,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:36:43.362,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:log_common:111]config change: buckets -> [{configs,[]}] [ns_server:debug,2012-11-13T10:36:43.362,ns_1@127.0.0.1:ns_bucket_worker<0.3977.0>:ns_bucket_sup:update_childs:91]Stopping child for dead bucket: {{per_bucket_sup,"beer-sample"}, <0.12898.0>,supervisor, [single_bucket_sup]} [ns_server:debug,2012-11-13T10:36:43.363,ns_1@127.0.0.1:ns_config_rep<0.3900.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2012-11-13T10:36:43.363,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:36:43.363,ns_1@127.0.0.1:<0.12898.0>:single_bucket_sup:top_loop:28]Delegating exit {'EXIT',<0.3983.0>,shutdown} to child supervisor: <0.12899.0> [ns_server:debug,2012-11-13T10:36:43.363,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:349]doing replicate_newnodes_docs [ns_server:debug,2012-11-13T10:36:43.364,ns_1@127.0.0.1:<0.12941.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_stats_event,<0.12940.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:36:43.364,ns_1@127.0.0.1:<0.12939.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_tick_event,<0.12938.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:36:43.364,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_config_isasl_sync:writeSASLConf:133]Writing isasl passwd file: "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw" [user:info,2012-11-13T10:36:43.365,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:terminate:661]Shutting down bucket "beer-sample" on 'ns_1@127.0.0.1' for deletion [ns_server:debug,2012-11-13T10:36:43.383,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:terminate:686]Proceeding into vbuckets dbs deletions [ns_server:debug,2012-11-13T10:36:43.388,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:36:43,388] - [couchbaseclient] [4511059968] - ERROR - got EOF\n" [ns_server:debug,2012-11-13T10:36:43.388,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:36:43,388] - [couchbaseclient] [4511059968] - ERROR - Got empty data (remote died?). 
from 127.0.0.1\n" [ns_server:info,2012-11-13T10:36:43.443,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/master">>: ok [ns_server:debug,2012-11-13T10:36:43.444,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:36:43,443] - [rest_client] [4511059968] - ERROR - http://127.0.0.1:8091/pools/default/buckets/beer-sample error 404 reason: unknown Requested resource not found.\r\n\n" [ns_server:info,2012-11-13T10:36:43.445,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/9">>: ok [ns_server:info,2012-11-13T10:36:43.447,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/8">>: ok [ns_server:info,2012-11-13T10:36:43.449,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/7">>: ok [ns_server:info,2012-11-13T10:36:43.450,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/63">>: ok [ns_server:info,2012-11-13T10:36:43.452,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/62">>: ok [ns_server:info,2012-11-13T10:36:43.454,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/61">>: ok [ns_server:info,2012-11-13T10:36:43.518,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/60">>: ok [ns_server:info,2012-11-13T10:36:43.521,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/6">>: ok [ns_server:info,2012-11-13T10:36:43.523,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/59">>: ok [ns_server:info,2012-11-13T10:36:43.525,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/58">>: ok [ns_server:info,2012-11-13T10:36:43.526,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/57">>: ok [ns_server:info,2012-11-13T10:36:43.528,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/56">>: ok [ns_server:info,2012-11-13T10:36:43.566,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:36:43.366118 PST 3: Shutting down tap connections! 
[ns_server:info,2012-11-13T10:36:43.566,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/55">>: ok [ns_server:info,2012-11-13T10:36:43.567,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/54">>: ok [ns_server:info,2012-11-13T10:36:43.569,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/53">>: ok [ns_server:info,2012-11-13T10:36:43.571,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/52">>: ok [ns_server:info,2012-11-13T10:36:43.572,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/51">>: ok [ns_server:info,2012-11-13T10:36:43.573,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/50">>: ok [ns_server:info,2012-11-13T10:36:43.575,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/5">>: ok [ns_server:info,2012-11-13T10:36:43.577,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/49">>: ok [ns_server:info,2012-11-13T10:36:43.579,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/48">>: ok [ns_server:info,2012-11-13T10:36:43.580,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/47">>: ok [ns_server:info,2012-11-13T10:36:43.582,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/46">>: ok [ns_server:info,2012-11-13T10:36:43.583,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/45">>: ok [ns_server:info,2012-11-13T10:36:43.584,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/44">>: ok [ns_server:info,2012-11-13T10:36:43.586,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/43">>: ok [ns_server:info,2012-11-13T10:36:43.587,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/42">>: ok [ns_server:info,2012-11-13T10:36:43.589,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/41">>: ok [ns_server:info,2012-11-13T10:36:43.590,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/40">>: ok [ns_server:info,2012-11-13T10:36:43.591,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/4">>: ok [ns_server:info,2012-11-13T10:36:43.593,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/39">>: ok [ns_server:info,2012-11-13T10:36:43.594,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/38">>: ok 
[ns_server:info,2012-11-13T10:36:43.595,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/37">>: ok [ns_server:info,2012-11-13T10:36:43.596,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/36">>: ok [ns_server:info,2012-11-13T10:36:43.598,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/35">>: ok [ns_server:info,2012-11-13T10:36:43.600,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/34">>: ok [ns_server:info,2012-11-13T10:36:43.601,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/33">>: ok [ns_server:info,2012-11-13T10:36:43.603,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/32">>: ok [ns_server:info,2012-11-13T10:36:43.604,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/31">>: ok [ns_server:info,2012-11-13T10:36:43.605,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/30">>: ok [ns_server:info,2012-11-13T10:36:43.606,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/3">>: ok [ns_server:info,2012-11-13T10:36:43.608,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/29">>: ok [ns_server:info,2012-11-13T10:36:43.609,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/28">>: ok [ns_server:info,2012-11-13T10:36:43.611,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/27">>: ok [ns_server:info,2012-11-13T10:36:43.612,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/26">>: ok [ns_server:info,2012-11-13T10:36:43.613,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/25">>: ok [ns_server:info,2012-11-13T10:36:43.614,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/24">>: ok [ns_server:info,2012-11-13T10:36:43.616,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/23">>: ok [ns_server:info,2012-11-13T10:36:43.617,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/22">>: ok [ns_server:info,2012-11-13T10:36:43.618,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/21">>: ok [ns_server:info,2012-11-13T10:36:43.619,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/20">>: ok [ns_server:info,2012-11-13T10:36:43.621,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/2">>: ok 
[ns_server:info,2012-11-13T10:36:43.622,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/19">>: ok [ns_server:info,2012-11-13T10:36:43.623,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/18">>: ok [ns_server:info,2012-11-13T10:36:43.624,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/17">>: ok [ns_server:info,2012-11-13T10:36:43.626,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/16">>: ok [ns_server:info,2012-11-13T10:36:43.627,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/15">>: ok [ns_server:info,2012-11-13T10:36:43.628,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/14">>: ok [ns_server:info,2012-11-13T10:36:43.629,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/13">>: ok [ns_server:info,2012-11-13T10:36:43.630,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/12">>: ok [ns_server:info,2012-11-13T10:36:43.632,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/11">>: ok [ns_server:info,2012-11-13T10:36:43.633,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/10">>: ok [ns_server:info,2012-11-13T10:36:43.634,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/1">>: ok [ns_server:info,2012-11-13T10:36:43.635,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/0">>: ok [ns_server:info,2012-11-13T10:36:43.635,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_databases_and_files:475]Couch dbs are deleted. Proceeding with bucket directory [ns_server:debug,2012-11-13T10:36:43.636,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:do_delete_bucket_indexes:496]indexes directory doesn't exist already. fine. 
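
The deletion pass above visits the per-vbucket databases in what looks like a scrambled order (master, 9, 8, 7, 63, 62, 61, 60, 6, 59, ...). It is simply a descending lexicographic sort of the database names as strings rather than a numeric sort; a few lines of Python reproduce the exact sequence:

    # Database names as logged: "master" plus one database per vbucket.
    names = ["master"] + [str(v) for v in range(64)]

    # A reverse string sort yields master, 9, 8, 7, 63, 62, 61, 60, 6,
    # 59, ..., 1, 0 -- the same order as the log entries above.
    for db in sorted(names, reverse=True):
        print('Deleting database <<"beer-sample/%s">>: ok' % db)
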
[ns_server:debug,2012-11-13T10:36:43.637,ns_1@127.0.0.1:<0.12920.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_node_disco_events,<0.12901.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:36:43.637,ns_1@127.0.0.1:<0.12921.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {mc_couch_events,<0.12901.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:36:43.637,ns_1@127.0.0.1:<0.12898.0>:single_bucket_sup:top_loop:24]per-bucket supervisor for "beer-sample" died with reason shutdown [ns_server:debug,2012-11-13T10:36:43.637,ns_1@127.0.0.1:<0.12902.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {ns_config_events,<0.12901.0>} exited with reason shutdown [ns_server:debug,2012-11-13T10:36:43.670,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "Exception in thread dispatcher-thread:\nTraceback (most recent call last):\n File \"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py\", line 552, in __bootstrap_inner\n self.run()\n File \"/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/threading.py\", line 505, in run\n self.__target(*self.__args, **self.__kwargs)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 699, in _start_dispatcher\n self.dispatcher.dispatch()\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 1051, in dispatch\n self.restart_connection_callback(ex.vbucket)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 815, in restart_vbucket_connection\n serverPort, self.bucket)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 1241, in direct_client\n bucket_info = rest.get_bucket(bucket)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/rest_client.py\", line 750, in get_bucket\n error=status)\nBucketUnavailableException: unable to find bucket beer-sample on the host @ 127.0.0.1\n\n" [ns_server:info,2012-11-13T10:36:43.865,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:idle:452]Restarting moxi on nodes ['ns_1@127.0.0.1'] [ns_server:debug,2012-11-13T10:36:43.865,ns_1@127.0.0.1:<0.13994.0>:ns_pubsub:do_subscribe_link:132]Parent process of subscription {buckets_events,<0.13993.0>} exited with reason normal [ns_server:info,2012-11-13T10:36:43.877,ns_1@127.0.0.1:<0.14246.0>:ns_port_sup:restart_port:134]restarting port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, 
{"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:debug,2012-11-13T10:36:43.878,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:terminate:143]Sending 'shutdown' to port [ns_server:info,2012-11-13T10:36:43.905,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:36:43.905,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: EOL on stdin. Exiting [ns_server:debug,2012-11-13T10:36:43.906,ns_1@127.0.0.1:<0.14271.0>:supervisor_cushion:init:43]starting ns_port_server with delay of 5000 [menelaus:info,2012-11-13T10:36:43.907,ns_1@127.0.0.1:<0.13448.0>:menelaus_web_buckets:handle_bucket_delete:340]Deleted bucket "beer-sample" [error_logger:info,2012-11-13T10:36:43.947,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.14271.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:36:44.133,ns_1@127.0.0.1:<0.14272.0>:ns_port_server:log:171]moxi<0.14272.0>: 2012-11-13 10:36:43: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.14272.0>: 2012-11-13 10:36:43: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8) [ns_server:debug,2012-11-13T10:36:50.232,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction. [ns_server:debug,2012-11-13T10:36:50.232,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2012-11-13T10:37:13.376,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "[2012-11-13 10:37:13,376] - [rest_client] [140735135437184] - ERROR - http://127.0.0.1:8091/pools/default/buckets/beer-sample error 404 reason: unknown Requested resource not found.\r\n\n" [ns_server:debug,2012-11-13T10:37:13.376,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: "{'username': 'Administrator', 'node': '127.0.0.1:8091', 'password': 'password', 'bucket': 'beer-sample', 'ram_quota': 100} ['/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/../samples/beer-sample.zip']\nGot empty data (remote died?). from 127.0.0.1\nTraceback (most recent call last):\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 237, in \n" [ns_server:debug,2012-11-13T10:37:13.408,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: " main()\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 229, in main\n" [ns_server:debug,2012-11-13T10:37:13.409,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: " docloader.populate_docs()\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 187, in populate_docs\n self.unzip_file_and_upload()\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 171, in unzip_file_and_upload\n self.enumerate_and_save(working_dir)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 161, in enumerate_and_save\n self.enumerate_and_save(dir)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 161, in enumerate_and_save\n self.enumerate_and_save(dir)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 151, in enumerate_and_save\n self.save_doc(dockey, fp)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/tools/../../lib/python/cbdocloader\", line 129, in save_doc\n self.bucket.set(dockey, 0, 0, raw_data)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/client.py\", line 232, in set\n" [ns_server:debug,2012-11-13T10:37:13.411,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: " self.mc_client.set(key, expiration, flags, value)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 927, in set\n return self._respond(item, event)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 880, in _respond\n" [ns_server:debug,2012-11-13T10:37:13.412,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: " self.restart_vbucket_connection(self.vbucketid(item['key']))\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 815, in restart_vbucket_connection\n serverPort, self.bucket)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/couchbaseclient.py\", line 1241, in direct_client\n" [ns_server:debug,2012-11-13T10:37:13.413,ns_1@127.0.0.1:<0.12893.0>:samples_loader_tasks:wait_for_exit:101]output from beer-sample: " bucket_info = rest.get_bucket(bucket)\n File \"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/python/couchbase/rest_client.py\", line 750, in get_bucket\n error=status)\ncouchbase.exception.BucketUnavailableException: unable to find bucket beer-sample on the host @ 127.0.0.1\n" [ns_server:debug,2012-11-13T10:37:13.429,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:handle_info:58]Consumed exit signal from samples loading task beer-sample: {'EXIT', <0.12893.0>, {failed_to_load_samples_with_status, 1}} [error_logger:error,2012-11-13T10:37:13.430,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: samples_loader_tasks:perform_loading_task/1 pid: <0.12893.0> registered_name: [] exception exit: {failed_to_load_samples_with_status,1} in function samples_loader_tasks:perform_loading_task/1 ancestors: [samples_loader_tasks,ns_server_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [<0.4011.0>] dictionary: [] trap_exit: false status: running heap_size: 28657 stack_size: 24 reductions: 30019 neighbours: [user:error,2012-11-13T10:37:13.442,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:handle_info:64]Loading sample bucket beer-sample failed: {failed_to_load_samples_with_status, 1} [ns_server:debug,2012-11-13T10:37:13.443,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:handle_info:68]Token holder died [ns_server:debug,2012-11-13T10:37:20.233,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction. [ns_server:debug,2012-11-13T10:37:20.234,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:37:50.236,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction. [ns_server:debug,2012-11-13T10:37:50.237,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:38:20.237,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. 
Rescheduling compaction. [ns_server:debug,2012-11-13T10:38:20.238,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2012-11-13T10:38:50.240,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:handle_info:266]No buckets to compact. Rescheduling compaction. [ns_server:debug,2012-11-13T10:38:50.240,ns_1@127.0.0.1:compaction_daemon<0.4002.0>:compaction_daemon:schedule_next_compaction:1199]Finished compaction too soon. Next run will be in 30s ------------------------------- logs_node (info): ------------------------------- [ns_server:info,2012-11-13T9:56:16.201,nonode@nohost:<0.63.0>:ns_server:init_logging:225]Started & configured logging [ns_server:info,2012-11-13T9:56:16.221,nonode@nohost:<0.63.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin"}, {path_config_etcdir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase"}, {path_config_libdir,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib"}, {path_config_datadir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/tmp"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}] [error_logger:info,2012-11-13T9:56:16.418,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.202.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.419,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.432,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.477,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.208.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.506,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.211.0>}, {name,httpc_manager}, {mfargs, 
{httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.507,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.210.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.511,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.212.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.512,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.209.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.517,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.213.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.534,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.214.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.535,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.535,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.569,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.220.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.581,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.221.0>}, {name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.584,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.222.0>}, {name,ssl_connection}, 
{mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:16.585,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.690,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.229.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.691,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.227.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:16.692,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.702,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.729,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.744,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.752,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.757,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2012-11-13T9:56:16.860,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.238.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", 
"/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.238.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.182,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.241.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.183,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.242.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.191,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.243.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.205,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.244.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.228,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.245.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.229,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.246.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.231,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.247.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.232,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.248.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[error_logger:info,2012-11-13T9:56:17.235,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.249.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.245,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.252.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.246,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.253.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.247,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.240.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.258,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.255.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.558,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.256.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.669,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.267.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.685,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.270.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.687,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.272.0>}, {name,index_merger_pool}, 
{mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.710,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.273.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.713,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.275.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.728,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.277.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.849,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.279.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.850,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.296.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.851,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.254.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.853,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.239.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:17.854,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.197.0>}, {name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [error_logger:error,2012-11-13T9:56:17.916,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= 
Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.249.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:17.917,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.298.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:17.995,nonode@nohost:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{9,56,17}}}, {memory, [{total,18081768}, {processes,5843992}, {processes_used,5837960}, {system,12237776}, {atom,853497}, {atom_used,849643}, {binary,92200}, {code,8180155}, {ets,1203608}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, mochiweb_acceptor,inet_tcp,gen_tcp,mochiweb_socket, mochiweb_socket_server,mochilists,mochiweb_http,eval_bits, couch_httpd,couch_view,couch_set_view_ddoc_cache, couch_query_servers,couch_spatial,mapreduce, couch_set_view,snappy,couch_compress, couch_spatial_validation,couch_set_view_mapreduce,ejson, couch_doc,couch_db_update_notifier,couch_btree, couch_ref_counter,couch_uuids,couch_db_updater,couch_db, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,couch_index_barrier,couch_event_sup, couch_log,couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, 
sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,155}, {node,nonode@nohost}, {nodes,[]}, {registered, [kernel_safe_sup,inet_db,rex,kernel_sup,couch_view, global_name_server,couch_auth_cache,couch_uuids,sasl_sup, file_server_2,couch_task_status,ale,lhttpc_manager, couch_server_sup,lhttpc_sup,couch_server,global_group, os_cmd_port_creator,couch_set_view_ddoc_cache, couch_index_merger_connection_pool,erl_prim_loader, tftp_sup,couch_spatial,'sink-ns_log',couch_rep_sup, 'sink-disk_stats','sink-disk_xdcr_errors',release_handler, standard_error_sup,couch_query_servers,'sink-disk_xdcr', httpc_sup,'sink-disk_debug','sink-disk_couchdb',overload, 'sink-disk_mapreduce_errors',error_logger,disk_log_sup, 'sink-stderr',httpc_profile_sup,'sink-disk_views', disk_log_server,httpc_manager,'sink-disk_error', alarm_handler,httpc_handler_sup,couch_set_view,os_mon_sup, code_server,ftp_sup,cpu_sup,application_controller, ssl_connection_sup,memsup,disksup,standard_error, couch_replica_index_barrier,couch_httpd,sasl_safe_sup, inets_sup,ssl_manager,couch_file_write_guard, crypto_server,couch_main_index_barrier,crypto_sup,ale_sup, couch_replication,timer_server,mochiweb_sup,couch_drv, couch_log,'sink-disk_default',ale_dynamic_sup, couch_task_events,ns_server_cluster_sup, couch_secondary_services,couch_db_update_notifier_sup, couch_primary_services,couch_db_update,user,cb_couch_sup, ssl_broker_sup,httpd_sup,ssl_server,init,ssl_sup, couch_config]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,4}] [ns_server:info,2012-11-13T9:56:18.053,nonode@nohost:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:info,2012-11-13T9:56:18.073,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_cluster_sup} started: [{pid,<0.301.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:18.076,nonode@nohost:dist_manager<0.302.0>:dist_manager:read_address_config:55]Reading ip config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/ip" [ns_server:info,2012-11-13T9:56:18.077,nonode@nohost:dist_manager<0.302.0>:dist_manager:init:125]ip config not found. Looks like we're brand new node [error_logger:info,2012-11-13T9:56:18.081,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.304.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2012-11-13T9:56:18.083,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.303.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:18.137,nonode@nohost:dist_manager<0.302.0>:dist_manager:bringup:193]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:18.161,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.306.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.162,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.307.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.164,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.308.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.165,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.305.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.167,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.302.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.311.0>}, {name,ns_cookie_manager}, 
{mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.312.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.211,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.314.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.323,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.321.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.324,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.323.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.325,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.324.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.338,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.325.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.344,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.326.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.327.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.386,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.328.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
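
Each PROGRESS REPORT above follows the same supervisor/child-spec shape, which makes the startup sequence easy to mine mechanically. A rough triage sketch, assuming only the "supervisor: {local,Name} started: [{pid,...}, {name,Child}" layout visible in these entries (children whose name is a tuple rather than a bare atom are simply not matched):

    import re

    # Matches: supervisor: {local,Sup} started: [{pid,<0.N.0>}, {name,Child}
    REPORT_RE = re.compile(
        r"supervisor:\s*\{local,(\w+)\}\s*"
        r"started:\s*\[\{pid,<[\d.]+>\},\s*\{name,\s*(\w+)\}",
    )

    def started_children(log_text):
        """Return (supervisor, child) pairs for each matched PROGRESS REPORT."""
        return REPORT_RE.findall(log_text)

    # e.g. started_children(open("diag.txt").read())
    # -> [('mnesia_kernel_sup', 'mnesia_monitor'),
    #     ('mnesia_kernel_sup', 'mnesia_subscr'), ...]
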
[error_logger:info,2012-11-13T9:56:18.398,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.329.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.404,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.330.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.416,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.331.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:18.417,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.322.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [error_logger:info,2012-11-13T9:56:18.519,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.337.0>}, {name,dets_sup}, {mfargs,{dets_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:18.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.338.0>}, {name,dets}, {mfargs,{dets_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.273,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.315.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.313.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T9:56:19.298,ns_1@127.0.0.1:ns_config_sup<0.363.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" 
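
ns_config builds the node's effective configuration in layers: compiled-in defaults, the static file loaded above, and then the dynamic config.dat written at runtime (absent on a brand-new node, as the next entries show). A hypothetical sketch of that precedence, reusing a couple of values from this log as examples:

    def effective_config(defaults, static_cfg, dynamic_cfg=None):
        """Later layers win: defaults < static file < dynamic config.dat."""
        merged = dict(defaults)
        merged.update(static_cfg)
        merged.update(dynamic_cfg or {})
        return merged

    cfg = effective_config(
        defaults={"rest_port": 8091},       # compiled-in default
        static_cfg={"memory_quota": 2391},  # value seen in this log
        dynamic_cfg=None,                   # no config.dat on first boot
    )
    # cfg == {"rest_port": 8091, "memory_quota": 2391}
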
[error_logger:info,2012-11-13T9:56:19.299,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.364.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.300,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.365.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:19.360,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [ns_server:info,2012-11-13T9:56:19.380,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2012-11-13T9:56:19.380,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:692]No dynamic config file found. Assuming we're brand new node [ns_server:info,2012-11-13T9:56:19.381,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{auto_failover_cfg,[{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}, {replication,[{enabled,true}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {buckets,[{configs,[]}]}, {memory_quota,2391}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, [{path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {remote_clusters,[]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {rest,[{port,8091}]}, {{node,'ns_1@127.0.0.1',membership},active}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_num_worker_process,4}, {xdcr_connection_timeout,60}, {xdcr_worker_batch_size,100}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_checkpoint_interval,1800}] [ns_server:info,2012-11-13T9:56:19.386,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_to_1_7_1:302]Upgrading config from 1.7 to 1.7.1 [ns_server:info,2012-11-13T9:56:19.406,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_1_to_1_7_2:313]Upgrading config from 1.7.1 to 1.7.2 
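
The upgrade entries here and immediately below walk the on-disk config forward one release hop at a time (1.7 -> 1.7.1 -> 1.7.2 -> 1.8.0 -> 1.8.1 -> 2.0) rather than jumping straight to the target version. A sketch of that chained-migration pattern, with placeholder bodies standing in for the real per-hop rewrites:

    UPGRADES = [
        ((1, 7),    (1, 7, 1), lambda cfg: cfg),  # placeholder bodies;
        ((1, 7, 1), (1, 7, 2), lambda cfg: cfg),  # each real hop rewrites
        ((1, 7, 2), (1, 8, 0), lambda cfg: cfg),  # specific config keys
        ((1, 8, 0), (1, 8, 1), lambda cfg: cfg),
        ((1, 8, 1), (2, 0),    lambda cfg: cfg),
    ]

    def upgrade(cfg, version):
        """Apply every hop whose source version matches, in order."""
        for src, dst, step in UPGRADES:
            if version == src:
                cfg, version = step(cfg), dst
        return cfg, version

    # upgrade({}, (1, 7)) -> ({}, (2, 0)) after all five hops
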
[ns_server:info,2012-11-13T9:56:19.409,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_7_2_to_1_8_0:367]Upgrading config from 1.7.2 to 1.8.0 [ns_server:info,2012-11-13T9:56:19.413,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_8_0_to_1_8_1:404]Upgrading config from 1.8.0 to 1.8.1 [ns_server:info,2012-11-13T9:56:19.420,ns_1@127.0.0.1:ns_config<0.366.0>:ns_config_default:upgrade_config_from_1_8_1_to_2_0:433]Upgrading config from 1.8.1 to 2.0 [error_logger:info,2012-11-13T9:56:19.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.366.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:19.485,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.369.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T9:56:19.514,ns_1@127.0.0.1:ns_config_isasl_sync<0.370.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. [error_logger:info,2012-11-13T9:56:20.516,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.370.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.373.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.650,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.375.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.363.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:20.724,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.377.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, 
{child_type,worker}] [ns_server:warn,2012-11-13T9:56:20.836,ns_1@127.0.0.1:ns_log<0.379.0>:ns_log:read_logs:69]Couldn't load logs from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log" (perhaps it's first startup): {error, enoent} [error_logger:info,2012-11-13T9:56:20.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.379.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.863,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.380.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.381.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:20.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.384.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T9:56:20.869,ns_1@127.0.0.1:ns_cookie_manager<0.311.0>:ns_cookie_manager:do_cookie_init:91]Initial otp cookie generated: bptrojzpwfmfrqou [error_logger:info,2012-11-13T9:56:20.893,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.385.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.245,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.389.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.253,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.390.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.391.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
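
The ns_cookie_manager entry above mints the cluster's shared Erlang distribution cookie (bptrojzpwfmfrqou) on first boot; every node must present the same cookie to connect to the cluster. Purely for illustration (this is not ns_cookie_manager's actual algorithm), a generator producing a cookie of the same 16-lowercase-letter shape:

    import string
    from random import SystemRandom

    _rand = SystemRandom()

    def make_cookie(length=16):
        """Random lowercase cookie shaped like 'bptrojzpwfmfrqou'."""
        return "".join(_rand.choice(string.ascii_lowercase) for _ in range(length))

    print(make_cookie())  # e.g. 'kqzrmtavoypdlsgc'
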
[error_logger:info,2012-11-13T9:56:21.308,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.392.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:21.310,ns_1@127.0.0.1:ns_config_log<0.373.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [error_logger:info,2012-11-13T9:56:21.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.383.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:21.353,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.398.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.354,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.400.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.355,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.401.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.356,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.402.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.369,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.404.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:21.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.403.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:21.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.405.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, 
{shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:21.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.406.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:21.379,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.408.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:21.418,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.411.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:error,2012-11-13T9:56:21.458,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit,{noproc,{gen_server,call,[samples_loader_tasks,get_tasks,2000]}},[{gen_server,call,3},{ns_heart,grab_samples_loading_tasks,0},{ns_heart,current_status,0},{ns_heart,handle_info,2},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}
[user:info,2012-11-13T9:56:21.477,ns_1@127.0.0.1:mb_master<0.414.0>:mb_master:init:89]I'm the only node, so I'm the master.
[ns_server:info,2012-11-13T9:56:21.595,ns_1@127.0.0.1:ns_config<0.366.0>:ns_online_config_upgrader:upgrade_config_on_join_from_pre_2_0_to_2_0:65]Adding some 2.0 specific keys to the config
[ns_server:error,2012-11-13T9:56:21.728,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit,{noproc,{gen_server,call,[samples_loader_tasks,get_tasks,2000]}},[{gen_server,call,3},{ns_heart,grab_samples_loading_tasks,0},{ns_heart,current_status,0},{ns_heart,handle_call,3},{gen_server,handle_msg,5},{proc_lib,init_p_do_apply,3}]}
[user:warn,2012-11-13T9:56:21.755,ns_1@127.0.0.1:<0.419.0>:ns_orchestrator:consider_switching_compat_mode:665]Changed cluster compat mode from undefined to [2,0]
[ns_server:info,2012-11-13T9:56:21.756,ns_1@127.0.0.1:ns_config<0.366.0>:ns_online_config_upgrader:upgrade_config_from_pre_2_0_to_2_0:69]Performing online config upgrade to 2.0 version
[error_logger:info,2012-11-13T9:56:21.757,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_master_sup}
started: [{pid,<0.419.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:21.804,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_master_sup}
started: [{pid,<0.435.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.005,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_master_sup}
started: [{pid,<0.436.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.006,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.414.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:22.007,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.437.0>}, {name,master_activity_events}, {mfargs,{gen_event,start_link,[{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.008,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.438.0>}, {name,master_activity_events_ingress}, {mfargs,{gen_event,start_link,[{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.439.0>}, {name,master_activity_events_timestamper}, {mfargs,{master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.440.0>}, {name,master_activity_events_pids_watcher}, {mfargs,{master_activity_events_pids_watcher,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.026,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.441.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.091,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,menelaus_sup}
started: [{pid,<0.444.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.093,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,menelaus_sup}
started: [{pid,<0.461.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.122,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor:
{local,menelaus_sup}
started: [{pid,<0.462.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[user:info,2012-11-13T9:56:22.127,ns_1@127.0.0.1:ns_server_sup<0.378.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
[error_logger:info,2012-11-13T9:56:22.127,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,menelaus_sup}
started: [{pid,<0.463.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.129,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.443.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:22.144,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mc_sup}
started: [{pid,<0.465.0>}, {name,mc_couch_events}, {mfargs,{gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.152,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mc_sup}
started: [{pid,<0.466.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}]
[ns_server:info,2012-11-13T9:56:22.166,ns_1@127.0.0.1:<0.467.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213
[error_logger:info,2012-11-13T9:56:22.166,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mc_sup}
started: [{pid,<0.467.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.167,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.464.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:22.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_port_sup}
started: [{pid,<0.469.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:22.241,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_port_sup}
started: [{pid,<0.470.0>},
{name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}},
{mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]]]}},
{restart_type,permanent}, {shutdown,86400000}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:22.255,ns_1@127.0.0.1:<0.475.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms
[error_logger:info,2012-11-13T9:56:22.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_port_sup}
started: [{pid,<0.472.0>},
{name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}},
{mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X",
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.468.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:22.264,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.474.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.265,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.475.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.476.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:22.267,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.477.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:23.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.479.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.629,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.492.0>}, 
{name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.630,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.491.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:23.708,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.494.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.799,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.496.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.800,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.498.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.801,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.499.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:23.802,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.500.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:24.054,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.506.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:24.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.525.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:24.496,ns_1@127.0.0.1:set_view_update_daemon<0.528.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 
[error_logger:info,2012-11-13T9:56:24.497,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.528.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:24.499,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_sup}
started: [{pid,<0.530.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:24.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.378.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:24.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: ns_server
started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T9:56:32.506,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
Supervisor: {local,ns_bucket_sup}
Context: shutdown_error
Reason: normal
Offender: [{pid,<0.492.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:32.707,ns_1@127.0.0.1:ns_port_memcached<0.473.0>:ns_port_server:log:171]memcached<0.473.0>: EOL on stdin. Initiating shutdown
[ns_server:info,2012-11-13T9:56:33.136,ns_1@127.0.0.1:ns_port_memcached<0.473.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0
[ns_server:info,2012-11-13T9:56:33.137,ns_1@127.0.0.1:<0.471.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0
[ns_server:info,2012-11-13T9:56:33.138,ns_1@127.0.0.1:<0.471.0>:ns_port_server:log:171]moxi<0.471.0>: EOL on stdin. Exiting
[ns_server:info,2012-11-13T9:56:33.138,ns_1@127.0.0.1:mb_master<0.414.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup
[error_logger:error,2012-11-13T9:56:33.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
Supervisor: {local,ns_server_cluster_sup}
Context: shutdown_error
Reason: killed
Offender: [{pid,<0.377.0>}, {name,vbucket_filter_changes_registry}, {mfargs,{ns_process_registry,start_link,[vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:33.386,ns_1@127.0.0.1:mb_mnesia<0.315.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting.
[error_logger:info,2012-11-13T9:56:33.386,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: mnesia
exited: stopped
type: temporary
[error_logger:error,2012-11-13T9:56:33.491,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_file:spawn_writer/2
pid: <0.483.0>
registered_name: []
exception exit: {noproc,{gen_server,call,[couch_file_write_guard,{remove,<0.483.0>},infinity]}}
in function gen_server:call/3
in call from couch_file:writer_loop/4
ancestors: [<0.480.0>,couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 987
stack_size: 24
reductions: 2016
neighbours:
[error_logger:error,2012-11-13T9:56:33.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_file:spawn_writer/2
pid: <0.260.0>
registered_name: []
exception exit: {noproc,{gen_server,call,[couch_file_write_guard,{remove,<0.260.0>},infinity]}}
in function gen_server:call/3
in call from couch_file:writer_loop/4
ancestors: [<0.257.0>,couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 987
stack_size: 24
reductions: 2583
neighbours:
[error_logger:error,2012-11-13T9:56:33.494,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.484.0> terminating
** Last message in was {'EXIT',<0.245.0>,killed}
** When Server state == {db,<0.484.0>,<0.485.0>,nil,<<"1352829382613535">>,<0.480.0>,<0.486.0>,
{db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil},
1,
{btree,<0.480.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558,true},
{btree,<0.480.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558,true},
{btree,<0.480.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558,true},
1,<<"_replicator">>,
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1",
[],nil,
{user_ctx,null,[],undefined},
nil,
[before_header,after_header,on_file_open],
[create,sys_db,{user_ctx,{user_ctx,null,[<<"_admin">>,<<"_replicator">>],undefined}}]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T9:56:33.498,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.484.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 313
neighbours:
[error_logger:error,2012-11-13T9:56:33.499,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.261.0> terminating
** Last message in was {'EXIT',<0.245.0>,killed}
** When Server state == {db,<0.261.0>,<0.262.0>,nil,<<"1352829377441480">>,<0.257.0>,<0.263.0>,
{db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0,
0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil},
1,
{btree,<0.257.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558,true},
{btree,<0.257.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558,true},
{btree,<0.257.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558,true},
1,<<"_users">>,
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1",
[],nil,
{user_ctx,null,[],undefined},
nil,
[before_header,after_header,on_file_open],
[create, {user_ctx,{user_ctx,null,[<<"_admin">>],undefined}}, sys_db]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T9:56:33.503,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher:
initial call: couch_db:init/1
pid: <0.261.0>
registered_name: []
exception exit: killed
in function gen_server:terminate/6
ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
messages: []
links: []
dictionary: []
trap_exit: true
status: running
heap_size: 1597
stack_size: 24
reductions: 395
neighbours:
[error_logger:info,2012-11-13T9:56:33.504,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: mapreduce
exited: stopped
type: temporary
[error_logger:info,2012-11-13T9:56:33.504,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_view_parser
exited: stopped
type: temporary
[error_logger:info,2012-11-13T9:56:33.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_index_merger
exited: stopped
type: temporary
[error_logger:info,2012-11-13T9:56:33.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
application: couch_set_view
exited: stopped
type: temporary
[error_logger:info,2012-11-13T9:56:33.506,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: couch_view_parser
started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T9:56:33.507,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: couch_set_view
started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T9:56:33.507,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: couch_index_merger
started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T9:56:33.508,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: mapreduce
started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T9:56:33.509,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.597.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.597.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.600.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:33.565,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.601.0>}, {name,couch_task_events}, {mfargs,{gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.566,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.602.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.567,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.603.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.850,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.604.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.874,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.617.0>}, {name,couch_db_update_event}, {mfargs,{gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.618.0>}, {name,couch_replication_event}, {mfargs,
{gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.877,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.619.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:33.878,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.620.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.623.0>}, {name,couch_main_index_barrier}, {mfargs,{couch_index_barrier,start_link,[couch_main_index_barrier,"max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.881,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.624.0>}, {name,couch_replica_index_barrier}, {mfargs,{couch_index_barrier,start_link,[couch_replica_index_barrier,"max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.882,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.599.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:33.883,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.626.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:33.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.627.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.885,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.629.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.631.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.633.0>}, {name,index_merger_pool}, {mfargs,{lhttpc_manager,start_link,[[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.634.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.889,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.636.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.890,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.638.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.892,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.640.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.893,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.657.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.894,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.625.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:33.895,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,cb_couch_sup}
started: [{pid,<0.598.0>}, {name,couch_app}, {mfargs,{couch_app,start,[fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:error,2012-11-13T9:56:33.897,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
Supervisor: {local,couch_primary_services}
Context: child_terminated
Reason: normal
Offender: [{pid,<0.620.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:33.896,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0}
Runtime info: [{otp_release,"R14B04"},
{erl_version,"5.8.5"},
{erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"},
{system_arch_raw,"i386-apple-darwin11.4.0"},
{system_arch,"i386-apple-darwin11.4.0"},
{localtime,{{2012,11,13},{9,56,33}}},
{memory, [{total,25813328}, {processes,7689912}, {processes_used,7559896}, {system,18123416}, {atom,1299409}, {atom_used,1270552}, {binary,228312}, {code,13004872}, {ets,1654792}]},
{loaded, [lib,mochinum,capi_utils,mochiweb_mime,mochiweb_io,mb_grid, mochijson2,set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, ns_config_isasl_sync,ns_config_replica,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]},
{applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]},
{pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init,otp_ring0]},
{process_count,170},
{node,'ns_1@127.0.0.1'},
{nodes,[]},
{registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']},
{cookie,bptrojzpwfmfrqou},
{wordsize,8},
{wall_clock,20}]
[error_logger:info,2012-11-13T9:56:33.899,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.659.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:33.931,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""]
[error_logger:info,2012-11-13T9:56:33.935,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.662.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.937,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.663.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.664.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.939,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_mnesia_sup}
started: [{pid,<0.666.0>}, {name,mb_mnesia_events}, {mfargs,
{gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_sup}
started: [{pid,<0.676.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.960,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.678.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.962,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.679.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.680.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:33.965,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.681.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:34.597,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.682.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:34.599,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.704.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:34.601,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.705.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:34.602,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.706.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:34.603,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.707.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:34.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_sup}
started: [{pid,<0.677.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T9:56:34.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: mnesia
started_at: 'ns_1@127.0.0.1'
[ns_server:info,2012-11-13T9:56:34.940,ns_1@127.0.0.1:ns_config_sup<0.751.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config"
[error_logger:info,2012-11-13T9:56:34.940,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_mnesia_sup}
started: [{pid,<0.667.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T9:56:34.941,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.665.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[ns_server:info,2012-11-13T9:56:34.942,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config"
[error_logger:info,2012-11-13T9:56:34.942,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_config_sup}
started: [{pid,<0.752.0>}, {name,ns_config_events}, {mfargs,{gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:34.943,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat"
[error_logger:info,2012-11-13T9:56:34.943,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_config_sup}
started: [{pid,<0.753.0>}, {name,ns_config_events_local}, {mfargs,{gen_event,start_link,[{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:info,2012-11-13T9:56:34.951,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config:
[{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]},
{{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, 
[{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {xdcr_worker_batch_size,100}, {xdcr_num_worker_process,4}, {xdcr_num_retries_per_request,2}, {xdcr_num_http_connections,20}, {xdcr_failure_restart_interval,30}, {xdcr_doc_batch_size_kb,512}, {xdcr_connection_timeout,60}, {xdcr_checkpoint_interval,1800}, {xdcr_capi_checkpoint_timeout,10}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,2391}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}] [error_logger:info,2012-11-13T9:56:34.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.754.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:34.960,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.756.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T9:56:34.961,ns_1@127.0.0.1:ns_config_isasl_sync<0.757.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
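The "Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying." warning just above is benign at this point in startup: ns_config_isasl_sync tries to reach memcached before ns_port_sup has launched it (the memcached port server only appears further down, as child <0.843.0>), so the TCP connect is refused and the worker retries. A minimal sketch of that connect-and-retry shape, assuming a plain gen_tcp connection; the function name is illustrative, not the actual ns_memcached code:

    %% Hypothetical retry loop illustrating the warning above: keep
    %% trying until memcached's port is accepting connections.
    connect_with_retry(Host, Port) ->
        case gen_tcp:connect(Host, Port, [binary, {active, false}]) of
            {ok, Sock} ->
                {ok, Sock};
            {error, econnrefused} ->
                timer:sleep(1000),        %% back off briefly, then retry
                connect_with_retry(Host, Port)
        end.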
[error_logger:info,2012-11-13T9:56:35.962,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.757.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.760.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.762.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.966,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.751.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T9:56:35.966,ns_1@127.0.0.1:ns_node_disco_events<0.771.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1'] [error_logger:info,2012-11-13T9:56:35.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.764.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.766.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.970,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.767.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.972,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.768.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.974,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.771.0>}, {name,ns_node_disco_events}, {mfargs, 
{gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:error,2012-11-13T9:56:35.974,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [user:info,2012-11-13T9:56:35.974,ns_1@127.0.0.1:mb_master<0.799.0>:mb_master:init:89]I'm the only node, so I'm the master. [error_logger:info,2012-11-13T9:56:35.975,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.772.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.977,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.775.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.776.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:35.979,ns_1@127.0.0.1:ns_config_log<0.760.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [error_logger:info,2012-11-13T9:56:35.980,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.777.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.983,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.778.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T9:56:35.984,ns_1@127.0.0.1:ns_server_sup<0.765.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
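The ns_heart error above ("Failed to grab samples loader tasks") is another startup-ordering artifact rather than a fault: ns_heart issues a gen_server:call to samples_loader_tasks, but that process is only registered later in the boot sequence (it appears below as child <0.877.0> of ns_server_sup), so the call exits with noproc. A sketch of how such a call can be guarded while the target may not exist yet; the helper is illustrative, not the actual ns_heart code:

    %% Hypothetical guard: return an empty task list instead of
    %% crashing when samples_loader_tasks is not registered yet.
    grab_samples_loading_tasks() ->
        try
            gen_server:call(samples_loader_tasks, get_tasks, 2000)
        catch
            exit:{noproc, _} -> []
        end.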
[error_logger:info,2012-11-13T9:56:35.985,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.770.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T9:56:35.986,ns_1@127.0.0.1:<0.838.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2012-11-13T9:56:35.987,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.781.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.988,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.784.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.990,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.786.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.992,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.787.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:35.994,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.789.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:35.995,ns_1@127.0.0.1:<0.846.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2012-11-13T9:56:35.996,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.788.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:35.998,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.790.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
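The "mccouch is listening on port 11213" line matches the mccouch_port setting in the node's memcached config shown earlier, and memcached connects back to this socket later in the log ("Connected to mccouch"). A bare-bones sketch of such a TCP accept loop, under the assumption of one handler process per connection (mc_conn_sup plays that role in the real tree); all names here are illustrative:

    %% Hypothetical listener: accept connections on the mccouch port
    %% and hand each socket to its own process.
    start_listener(Port) ->
        {ok, LSock} = gen_tcp:listen(Port, [binary,
                                            {reuseaddr, true},
                                            {active, false}]),
        accept_loop(LSock).

    accept_loop(LSock) ->
        {ok, Sock} = gen_tcp:accept(LSock),
        Pid = spawn(fun() -> handle_connection(Sock) end),
        ok = gen_tcp:controlling_process(Sock, Pid),
        accept_loop(LSock).

    handle_connection(Sock) ->
        gen_tcp:close(Sock).   %% stub; the real handler speaks the mccouch protocol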
[error_logger:info,2012-11-13T9:56:36.000,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.791.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.001,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.793.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.002,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.797.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.004,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.802.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.005,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.806.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.006,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.807.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.007,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.799.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.008,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.808.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.809.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.011,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.810.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.012,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.811.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:error,2012-11-13T9:56:36.014,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T9:56:36.014,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.812.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.016,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.815.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.017,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.832.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.018,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.833.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.834.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.814.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.836.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.022,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.837.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.023,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.838.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.024,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.835.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.025,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.840.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.029,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.841.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] 
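Comparing this moxi child spec with the port_servers entry in the config dump above shows how the {Format, Args} argv templates are rendered into literal flags before launch: {"port_listen=~B,...",[port]} became "port_listen=11211,...", and the ~B in the saslBucketsStreaming URL became the REST port 8091. A sketch of that substitution for the simple case where every argument is an atom looked up in the node's config proplist; the real templates also support {M,F,A} arguments such as {misc,this_node_rest_port,[]}, which this hypothetical renderer does not handle:

    %% Hypothetical renderer for the simple atom-key case only.
    render_arg({Fmt, Keys}, Config) ->
        Values = [proplists:get_value(K, Config) || K <- Keys],
        lists:flatten(io_lib:format(Fmt, Values));
    render_arg(Literal, _Config) when is_list(Literal) ->
        Literal.                       %% e.g. "-Z", "-p", "0"

For example, render_arg({"~B", [port]}, [{port, 11210}]) yields "11210".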
[error_logger:info,2012-11-13T9:56:36.036,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.843.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.041,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.839.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.042,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.845.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, 
{shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.043,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.846.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.044,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.848.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.045,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.849.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.064,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.850.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.065,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.861.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.067,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.859.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.068,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.862.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.070,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.864.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T9:56:36.070,ns_1@127.0.0.1:set_view_update_daemon<0.875.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2012-11-13T9:56:36.071,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.866.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.072,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.867.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.073,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.868.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T9:56:36.074,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.870.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.075,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.873.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.077,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.875.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.079,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.877.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T9:56:36.080,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.765.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:error,2012-11-13T10:00:17.454,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:handle_call:623]Failed to update config: {exit,{not_found,"default"}} Stacktrace: [{ns_bucket,'-delete_bucket_returning_config/1-fun-0-',4}, {misc,key_update_rec,4}, {ns_config,'-update_sub_key/3-fun-0-',3}, {ns_config,'-update_key/2-fun-0-',3}, {ns_config,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}] [menelaus:info,2012-11-13T10:00:17.457,ns_1@127.0.0.1:<0.816.0>:menelaus_web_buckets:do_bucket_create:415]Created bucket "default" of 
type: membase [{num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}] [ns_server:info,2012-11-13T10:00:17.719,ns_1@127.0.0.1:<0.842.0>:ns_port_server:log:171]moxi<0.842.0>: 2012-11-13 10:04:00: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of vBuckets must be a power of two > 0 and <= 65536 ({ moxi<0.842.0>: "name": "default", moxi<0.842.0>: "nodeLocator": "vbucket", moxi<0.842.0>: "saslPassword": "", moxi<0.842.0>: "nodes": [{ moxi<0.842.0>: "hostname": "127.0.0.1:8091", moxi<0.842.0>: "ports": { moxi<0.842.0>: "direct": 11210, moxi<0.842.0>: "proxy": 11211 moxi<0.842.0>: } moxi<0.842.0>: }], moxi<0.842.0>: "vBucketServerMap": { moxi<0.842.0>: "hashAlgorithm": "CRC", moxi<0.842.0>: "numReplicas": 1, moxi<0.842.0>: "serverList": ["127.0.0.1:11210"], moxi<0.842.0>: "vBucketMap": [] moxi<0.842.0>: } moxi<0.842.0>: }) [error_logger:info,2012-11-13T10:00:17.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.1685.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:00:18.012,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1687.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:00:18.274,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [error_logger:info,2012-11-13T10:00:18.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1703.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1709.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.329,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1710.0>}, {name,{ns_vbm_new_sup,"default"}}, 
{mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:00:18.337,ns_1@127.0.0.1:janitor_agent-default<0.1712.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. [error_logger:info,2012-11-13T10:00:18.337,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1711.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:00:18.338,ns_1@127.0.0.1:janitor_agent-default<0.1712.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config [error_logger:info,2012-11-13T10:00:18.339,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1712.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.356,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1713.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1714.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.374,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1716.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.375,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1718.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:00:18.376,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.1719.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:00:18.425,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:log:171]memcached<0.844.0>: Tue Nov 13 10:00:18.225294 PST 3: 
Trying to connect to mccouch: "localhost:11213" memcached<0.844.0>: Tue Nov 13 10:00:18.226492 PST 3: Connected to mccouch: "localhost:11213" memcached<0.844.0>: Tue Nov 13 10:00:18.273915 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear" memcached<0.844.0>: Tue Nov 13 10:00:18.274020 PST 3: Failed to load mutation log, falling back to key dump memcached<0.844.0>: Tue Nov 13 10:00:18.274063 PST 3: Extension support isn't implemented in this version of bucket_engine memcached<0.844.0>: Tue Nov 13 10:00:18.274235 PST 3: metadata loaded in 24 ms memcached<0.844.0>: Tue Nov 13 10:00:18.299836 PST 3: warmup completed in 24 ms [user:info,2012-11-13T10:00:18.533,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds. [ns_server:info,2012-11-13T10:00:18.736,ns_1@127.0.0.1:<0.1677.0>:ns_janitor:cleanup_with_membase_bucket_check_map:67]janitor decided to generate initial vbucket map [ns_server:info,2012-11-13T10:00:19.077,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active [ns_server:info,2012-11-13T10:00:19.078,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active [ns_server:info,2012-11-13T10:00:19.078,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active [ns_server:info,2012-11-13T10:00:19.079,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active [ns_server:info,2012-11-13T10:00:19.080,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active [ns_server:info,2012-11-13T10:00:19.080,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active [ns_server:info,2012-11-13T10:00:19.081,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active [ns_server:info,2012-11-13T10:00:19.082,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active [ns_server:info,2012-11-13T10:00:19.082,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active [ns_server:info,2012-11-13T10:00:19.083,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active [ns_server:info,2012-11-13T10:00:19.083,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active [ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active [ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active [ns_server:info,2012-11-13T10:00:19.084,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active [ns_server:info,2012-11-13T10:00:19.085,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active [ns_server:info,2012-11-13T10:00:19.085,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active [ns_server:info,2012-11-13T10:00:19.086,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active [ns_server:info,2012-11-13T10:00:19.086,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active [ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active 
[ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active [ns_server:info,2012-11-13T10:00:19.087,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active [ns_server:info,2012-11-13T10:00:19.088,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active [ns_server:info,2012-11-13T10:00:19.088,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active [ns_server:info,2012-11-13T10:00:19.089,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active [ns_server:info,2012-11-13T10:00:19.089,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active [ns_server:info,2012-11-13T10:00:19.090,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active [ns_server:info,2012-11-13T10:00:19.090,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active [ns_server:info,2012-11-13T10:00:19.091,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active [ns_server:info,2012-11-13T10:00:19.091,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active [ns_server:info,2012-11-13T10:00:19.117,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active [ns_server:info,2012-11-13T10:00:19.118,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active [ns_server:info,2012-11-13T10:00:19.118,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active [ns_server:info,2012-11-13T10:00:19.119,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active [ns_server:info,2012-11-13T10:00:19.119,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active [ns_server:info,2012-11-13T10:00:19.120,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active [ns_server:info,2012-11-13T10:00:19.120,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active [ns_server:info,2012-11-13T10:00:19.121,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active [ns_server:info,2012-11-13T10:00:19.121,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active [ns_server:info,2012-11-13T10:00:19.122,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active [ns_server:info,2012-11-13T10:00:19.122,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active [ns_server:info,2012-11-13T10:00:19.123,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active [ns_server:info,2012-11-13T10:00:19.123,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active [ns_server:info,2012-11-13T10:00:19.124,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active 
[ns_server:info,2012-11-13T10:00:19.125,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active [ns_server:info,2012-11-13T10:00:19.289,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active [ns_server:info,2012-11-13T10:00:19.289,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active [ns_server:info,2012-11-13T10:00:19.290,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active [ns_server:info,2012-11-13T10:00:19.291,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active [ns_server:info,2012-11-13T10:00:19.291,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active [ns_server:info,2012-11-13T10:00:19.292,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active [ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active [ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active [ns_server:info,2012-11-13T10:00:19.293,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active [ns_server:info,2012-11-13T10:00:19.294,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active [ns_server:info,2012-11-13T10:00:19.294,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active [ns_server:info,2012-11-13T10:00:19.295,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active [ns_server:info,2012-11-13T10:00:19.296,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active [ns_server:info,2012-11-13T10:00:19.296,ns_1@127.0.0.1:<0.1706.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active [ns_server:info,2012-11-13T10:00:19.297,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default" [ns_server:info,2012-11-13T10:00:19.297,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 1 seconds [ns_server:info,2012-11-13T10:00:20.999,ns_1@127.0.0.1:ns_doctor<0.793.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"] [ns_server:info,2012-11-13T10:00:36.077,ns_1@127.0.0.1:<0.2255.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:00:36.111,ns_1@127.0.0.1:<0.2255.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:01:06.122,ns_1@127.0.0.1:<0.2388.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket 
`default` [ns_server:info,2012-11-13T10:01:06.124,ns_1@127.0.0.1:<0.2388.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [user:info,2012-11-13T10:01:24.323,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown [ns_server:info,2012-11-13T10:01:24.323,ns_1@127.0.0.1:ns_memcached-default<0.1703.0>:ns_memcached:terminate:672]This bucket shutdown is not due to bucket deletion. Doing nothing [error_logger:error,2012-11-13T10:01:24.326,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.861.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:24.526,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:log:171]memcached<0.844.0>: EOL on stdin. Initiating shutdown memcached<0.844.0>: Tue Nov 13 10:01:24.491710 PST 3: Shutting down tap connections! memcached<0.844.0>: Tue Nov 13 10:01:24.492891 PST 3: Had to wait 1081 usec for shutdown [ns_server:error,2012-11-13T10:01:24.526,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:info,2012-11-13T10:01:24.533,ns_1@127.0.0.1:<0.1704.0>:mc_connection:run_loop:202]mccouch connection was normally closed [ns_server:info,2012-11-13T10:01:24.533,ns_1@127.0.0.1:ns_port_memcached<0.844.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0 [ns_server:info,2012-11-13T10:01:24.534,ns_1@127.0.0.1:<0.842.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:01:24.535,ns_1@127.0.0.1:<0.842.0>:ns_port_server:log:171]moxi<0.842.0>: EOL on stdin. Exiting [ns_server:info,2012-11-13T10:01:24.536,ns_1@127.0.0.1:mb_master<0.799.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup [error_logger:error,2012-11-13T10:01:24.655,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.764.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:24.830,ns_1@127.0.0.1:mb_mnesia<0.667.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting. 
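From "Shutting down bucket" onward this is a clean node stop, not a failure: both port servers exit with status 0 after EOL on stdin, and the SUPERVISOR and CRASH reports that follow are the supervision tree being torn down. The couch_db servers terminate with reason killed because an exit(_, kill) from a linked process propagates to them during teardown; supervisors issue such unconditional kills for brutal_kill children or when a shutdown timeout expires, and gen_server dutifully logs the result as a crash. A sketch of a child spec with that strategy, in the classic tuple form used throughout this tree; the module name is illustrative:

    %% Hypothetical child spec: 'brutal_kill' means no grace period;
    %% on shutdown the child receives exit(Pid, kill) and its
    %% termination is reported with reason 'killed', as above.
    child_spec() ->
        {my_worker,
         {my_worker, start_link, []},
         permanent,
         brutal_kill,
         worker,
         [my_worker]}.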
[error_logger:info,2012-11-13T10:01:24.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: mnesia
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:24.856,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.614.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.614.0>,<0.615.0>,nil,<<"1352829393849668">>,<0.611.0>,<0.616.0>,{db_header,11,1,<<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>,<<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>,nil,0,nil,nil},1,{btree,<0.611.0>,{3431,<<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>,51},#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.611.0>,{3482,<<0,0,0,0,1>>,49},#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.611.0>,nil,#Fun,#Fun,#Fun,nil,1279,2558,true},1,<<"_users">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.861,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.614.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 987
    stack_size: 24
    reductions: 241
  neighbours:
[error_logger:error,2012-11-13T10:01:24.862,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1905.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1905.0>,<0.1906.0>,nil,<<"1352829621747368">>,<0.1902.0>,<0.1907.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1902.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1902.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1902.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/46">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1905.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 573
  neighbours:
[error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.608.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.608.0>,<0.609.0>,nil,<<"1352829393812330">>,<0.605.0>,<0.610.0>,{db_header,11,1,<<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>,<<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>,nil,0,nil,nil},1,{btree,<0.605.0>,{2900,<<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>,62},#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.605.0>,{2962,<<0,0,0,0,1>>,60},#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.605.0>,nil,#Fun,#Fun,#Fun,nil,1279,2558,true},1,<<"_replicator">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.608.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 610
    stack_size: 24
    reductions: 328
  neighbours:
[error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1967.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1967.0>,<0.1968.0>,nil,<<"1352829622857071">>,<0.1964.0>,<0.1969.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1964.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1964.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1964.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/36">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1967.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 565
  neighbours:
[error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1858.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1858.0>,<0.1859.0>,nil,<<"1352829620929634">>,<0.1855.0>,<0.1860.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1855.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1855.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1855.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/52">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1858.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 584
  neighbours:
[error_logger:error,2012-11-13T10:01:24.896,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2079.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2079.0>,<0.2080.0>,nil,<<"1352829624750495">>,<0.2076.0>,<0.2081.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2076.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2076.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2076.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/18">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2079.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.901,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2055.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2055.0>,<0.2056.0>,nil,<<"1352829624313759">>,<0.2052.0>,<0.2057.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2052.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2052.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2052.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/22">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.905,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2055.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2085.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2085.0>,<0.2086.0>,nil,<<"1352829624862447">>,<0.2082.0>,<0.2087.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2082.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2082.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2082.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/17">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2085.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2121.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2121.0>,<0.2122.0>,nil,<<"1352829625534387">>,<0.2118.0>,<0.2123.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2118.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2118.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2118.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/11">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.916,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2121.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 616
  neighbours:
[error_logger:error,2012-11-13T10:01:24.917,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2176.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2176.0>,<0.2177.0>,nil,<<"1352829626420340">>,<0.2173.0>,<0.2178.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2173.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2173.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2173.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/5">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2176.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 611
  neighbours:
[error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2170.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2170.0>,<0.2171.0>,nil,<<"1352829626308033">>,<0.2167.0>,<0.2172.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2167.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2167.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2167.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/6">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.925,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2170.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 619
  neighbours:
[error_logger:error,2012-11-13T10:01:24.927,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_file:spawn_writer/2
    pid: <0.1693.0>
    registered_name: []
    exception exit: {noproc,{gen_server,call,[couch_file_write_guard,{remove,<0.1693.0>},infinity]}}
      in function gen_server:call/3
      in call from couch_file:writer_loop/4
    ancestors: [<0.1690.0>,couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 987
    stack_size: 24
    reductions: 863
  neighbours:
[error_logger:error,2012-11-13T10:01:24.928,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2139.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2139.0>,<0.2140.0>,nil,<<"1352829625961162">>,<0.2136.0>,<0.2141.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2136.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2136.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2136.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/8">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2139.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 644
  neighbours:
[error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2182.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2182.0>,<0.2183.0>,nil,<<"1352829626532283">>,<0.2179.0>,<0.2184.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2179.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2179.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2179.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/4">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.937,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2182.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 539
  neighbours:
[error_logger:error,2012-11-13T10:01:24.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1949.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1949.0>,<0.1950.0>,nil,<<"1352829622541445">>,<0.1946.0>,<0.1951.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1946.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1946.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1946.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/39">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.942,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1949.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 673
  neighbours:
[error_logger:error,2012-11-13T10:01:24.943,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1864.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1864.0>,<0.1865.0>,nil,<<"1352829621044192">>,<0.1861.0>,<0.1876.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1861.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1861.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1861.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/51">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.946,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1864.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 668
  neighbours:
[error_logger:error,2012-11-13T10:01:24.947,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2097.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2097.0>,<0.2098.0>,nil,<<"1352829625075442">>,<0.2094.0>,<0.2099.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2094.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2094.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2094.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/15">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.950,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1887.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1887.0>,<0.1888.0>,nil,<<"1352829621295039">>,<0.1884.0>,<0.1889.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1884.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1884.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1884.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/49">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.953,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2097.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 676
  neighbours:
[error_logger:error,2012-11-13T10:01:24.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1816.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1816.0>,<0.1817.0>,nil,<<"1352829620203018">>,<0.1813.0>,<0.1818.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1813.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1813.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1813.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/59">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1887.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 693
  neighbours:
[error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1816.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 688
  neighbours:
[error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1880.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1880.0>,<0.1881.0>,nil,<<"1352829621166446">>,<0.1877.0>,<0.1883.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1877.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1877.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1877.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/50">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1880.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:info,2012-11-13T10:01:24.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: mapreduce
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:24.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1955.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1955.0>,<0.1956.0>,nil,<<"1352829622642724">>,<0.1952.0>,<0.1957.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1952.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1952.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1952.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/38">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.968,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1955.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 725
  neighbours:
[error_logger:error,2012-11-13T10:01:24.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1822.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1822.0>,<0.1823.0>,nil,<<"1352829620303621">>,<0.1819.0>,<0.1824.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1819.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1819.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1819.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/58">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.972,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2043.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2043.0>,<0.2044.0>,nil,<<"1352829624110888">>,<0.2040.0>,<0.2045.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2040.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2040.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2040.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/24">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.976,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1822.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 720
  neighbours:
[error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2043.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 724
  neighbours:
[error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2091.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2091.0>,<0.2092.0>,nil,<<"1352829624963872">>,<0.2088.0>,<0.2093.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2088.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2088.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2088.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/16">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2091.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 736
  neighbours:
[error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2015.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2015.0>,<0.2016.0>,nil,<<"1352829623674489">>,<0.2012.0>,<0.2017.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2012.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2012.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2012.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/28">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.986,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2015.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 744
  neighbours:
[error_logger:error,2012-11-13T10:01:24.987,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1840.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1840.0>,<0.1841.0>,nil,<<"1352829620605270">>,<0.1837.0>,<0.1842.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1837.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1837.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1837.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/55">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:24.990,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1840.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 752
  neighbours:
[error_logger:info,2012-11-13T10:01:24.991,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_view_parser
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:25.114,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1846.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1846.0>,<0.1847.0>,nil,<<"1352829620717440">>,<0.1843.0>,<0.1848.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1843.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1843.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1843.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/54">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1846.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 764
  neighbours:
[error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2200.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2200.0>,<0.2201.0>,nil,<<"1352829626845747">>,<0.2197.0>,<0.2202.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2197.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2197.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2197.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/1">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.123,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2200.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 759
  neighbours:
[error_logger:error,2012-11-13T10:01:25.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1852.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1852.0>,<0.1853.0>,nil,<<"1352829620818124">>,<0.1849.0>,<0.1854.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1849.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1849.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1849.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/53">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.128,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1852.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 780
  neighbours:
[error_logger:error,2012-11-13T10:01:25.129,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1828.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1828.0>,<0.1829.0>,nil,<<"1352829620404369">>,<0.1825.0>,<0.1830.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1825.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1825.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1825.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/57">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:info,2012-11-13T10:01:25.132,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_index_merger
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:25.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1828.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 788
  neighbours:
[error_logger:info,2012-11-13T10:01:25.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_set_view
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:25.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2049.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2049.0>,<0.2050.0>,nil,<<"1352829624211604">>,<0.2046.0>,<0.2051.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2046.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2046.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2046.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/23">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.138,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2049.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 791
  neighbours:
[error_logger:info,2012-11-13T10:01:25.139,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_view_parser
  started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T10:01:25.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_set_view
  started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T10:01:25.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_index_merger
  started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T10:01:25.141,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1802.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1802.0>,<0.1803.0>,nil,<<"1352829619934967">>,<0.1799.0>,<0.1804.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1799.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1799.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1799.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/61">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.144,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1802.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 824
  neighbours:
[error_logger:error,2012-11-13T10:01:25.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2009.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2009.0>,<0.2010.0>,nil,<<"1352829623563532">>,<0.2006.0>,<0.2011.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2006.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2006.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2006.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/29">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:info,2012-11-13T10:01:25.148,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: mapreduce
  started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2009.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 832
  neighbours:
[error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1834.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1834.0>,<0.1835.0>,nil,<<"1352829620504957">>,<0.1831.0>,<0.1836.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1831.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1831.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1831.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/56">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1834.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 844
  neighbours:
[error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2133.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2133.0>,<0.2134.0>,nil,<<"1352829625860252">>,<0.2130.0>,<0.2135.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2130.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2130.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2130.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/9">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2133.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 852
  neighbours:
[error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2003.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2003.0>,<0.2004.0>,nil,<<"1352829623450371">>,<0.2000.0>,<0.2005.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2000.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2000.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2000.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/30">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2003.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 860
  neighbours:
[error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1925.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1925.0>,<0.1926.0>,nil,<<"1352829622094265">>,<0.1922.0>,<0.1927.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1922.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1922.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1922.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/43">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.164,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1808.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1808.0>,<0.1809.0>,nil,<<"1352829620105814">>,<0.1805.0>,<0.1812.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1805.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1805.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1805.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/60">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/60.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1925.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 881
  neighbours:
[error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1808.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2109.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2109.0>,<0.2110.0>,nil,<<"1352829625288026">>,<0.2106.0>,<0.2111.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2106.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2106.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2106.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/13">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2109.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 552
  neighbours:
[error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1979.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1979.0>,<0.1980.0>,nil,<<"1352829623047373">>,<0.1976.0>,<0.1981.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1976.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1976.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1976.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/34">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1979.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 573
  neighbours:
[error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1899.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1899.0>,<0.1900.0>,nil,<<"1352829621624691">>,<0.1896.0>,<0.1901.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1896.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1896.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1896.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/47">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.180,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1961.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1961.0>,<0.1962.0>,nil,<<"1352829622754407">>,<0.1958.0>,<0.1963.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1958.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1958.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1958.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/37">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1899.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 581
  neighbours:
[error_logger:error,2012-11-13T10:01:25.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1961.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 585
  neighbours:
[error_logger:error,2012-11-13T10:01:25.186,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2127.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2127.0>,<0.2128.0>,nil,<<"1352829625668919">>,<0.2124.0>,<0.2129.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2124.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2124.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2124.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/10">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.189,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2127.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 584
  neighbours:
[error_logger:error,2012-11-13T10:01:25.190,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2021.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2021.0>,<0.2022.0>,nil,<<"1352829623786335">>,<0.2018.0>,<0.2023.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.2018.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2018.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.2018.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/27">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.193,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2021.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 556
  neighbours:
[error_logger:error,2012-11-13T10:01:25.194,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.1937.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1937.0>,<0.1938.0>,nil,<<"1352829622323724">>,<0.1934.0>,<0.1939.0>,{db_header,11,0,nil,nil,<<0,0,0,0,16,46,0,0,0,0,0,93>>,0,nil,nil},0,{btree,<0.1934.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1934.0>,nil,#Fun,#Fun,#Fun,#Fun,1279,2558,true},{btree,<0.1934.0>,{4142,<<>>,93},#Fun,#Fun,#Fun,nil,1279,2558,true},0,<<"default/41">>,"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1",[],nil,{user_ctx,null,[],undefined},nil,[before_header,after_header,on_file_open],[]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.197,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.1937.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 565
  neighbours:
[error_logger:error,2012-11-13T10:01:25.198,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2073.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When
Server state == {db,<0.2073.0>,<0.2074.0>,nil,<<"1352829624649630">>, <0.2070.0>,<0.2075.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.201,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2073.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 940 neighbours: [error_logger:error,2012-11-13T10:01:25.202,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2067.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2067.0>,<0.2068.0>,nil,<<"1352829624537966">>, <0.2064.0>,<0.2069.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2067.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1796.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1796.0>,<0.1797.0>,nil,<<"1352829619826563">>, <0.1793.0>,<0.1798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: 
initial call: couch_db:init/1 pid: <0.1796.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 956 neighbours: [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2206.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2206.0>,<0.2207.0>,nil,<<"1352829626935149">>, <0.2203.0>,<0.2208.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2206.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 951 neighbours: [error_logger:error,2012-11-13T10:01:25.215,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2115.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2115.0>,<0.2116.0>,nil,<<"1352829625388570">>, <0.2112.0>,<0.2117.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2112.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.218,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2115.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 972 neighbours: [error_logger:error,2012-11-13T10:01:25.219,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1973.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1973.0>,<0.1974.0>,nil,<<"1352829622946642">>, <0.1970.0>,<0.1975.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1970.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.222,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1973.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 993 neighbours: [error_logger:error,2012-11-13T10:01:25.223,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2188.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2188.0>,<0.2189.0>,nil,<<"1352829626632806">>, <0.2185.0>,<0.2190.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.227,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2188.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 975 neighbours: [error_logger:error,2012-11-13T10:01:25.228,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2145.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2145.0>,<0.2146.0>,nil,<<"1352829626107228">>, <0.2142.0>,<0.2162.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.231,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2145.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] 
messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.232,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1931.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1931.0>,<0.1932.0>,nil,<<"1352829622206092">>, <0.1928.0>,<0.1933.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.235,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1931.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 569 neighbours: [error_logger:error,2012-11-13T10:01:25.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2103.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2103.0>,<0.2104.0>,nil,<<"1352829625181072">>, <0.2100.0>,<0.2105.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.239,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2103.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 564 neighbours: [error_logger:error,2012-11-13T10:01:25.240,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1911.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1911.0>,<0.1912.0>,nil,<<"1352829621859289">>, <0.1908.0>,<0.1913.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.243,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1911.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1033 neighbours: [error_logger:error,2012-11-13T10:01:25.244,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1997.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1997.0>,<0.1998.0>,nil,<<"1352829623349426">>, <0.1994.0>,<0.1999.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.247,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1997.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1028 neighbours: [error_logger:error,2012-11-13T10:01:25.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2033.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2033.0>,<0.2034.0>,nil,<<"1352829624009960">>, <0.2030.0>,<0.2039.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2033.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1036 neighbours: 
[error_logger:error,2012-11-13T10:01:25.252,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1893.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1893.0>,<0.1894.0>,nil,<<"1352829621487024">>, <0.1890.0>,<0.1895.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1893.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1057
neighbours:
[error_logger:error,2012-11-13T10:01:25.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1991.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1991.0>,<0.1992.0>,nil,<<"1352829623248968">>, <0.1988.0>,<0.1993.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.259,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1991.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1052
neighbours:
[error_logger:error,2012-11-13T10:01:25.260,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2027.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2027.0>,<0.2028.0>,nil,<<"1352829623887061">>, <0.2024.0>,<0.2029.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1694.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1694.0>,<0.1695.0>,nil,<<"1352829618011823">>, <0.1690.0>,<0.1700.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create]}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.2027.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1060
neighbours:
[error_logger:error,2012-11-13T10:01:25.268,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 855
neighbours:
[error_logger:error,2012-11-13T10:01:25.269,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1943.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1943.0>,<0.1944.0>,nil,<<"1352829622429697">>, <0.1940.0>,<0.1945.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.272,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1943.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1089
neighbours:
[error_logger:error,2012-11-13T10:01:25.273,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1917.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1917.0>,<0.1918.0>,nil,<<"1352829621982184">>, <0.1914.0>,<0.1921.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.276,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1917.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1097
neighbours:
[error_logger:error,2012-11-13T10:01:25.277,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2194.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2194.0>,<0.2195.0>,nil,<<"1352829626733573">>, <0.2191.0>,<0.2196.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1786.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1786.0>,<0.1787.0>,nil,<<"1352829619455301">>, <0.1783.0>,<0.1790.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.2194.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 539
neighbours:
[error_logger:error,2012-11-13T10:01:25.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1786.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556
neighbours:
[error_logger:error,2012-11-13T10:01:25.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1985.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.1985.0>,<0.1986.0>,nil,<<"1352829623147982">>, <0.1982.0>,<0.1987.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.1985.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556
neighbours:
[error_logger:error,2012-11-13T10:01:25.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2061.0> terminating
** Last message in was {'EXIT',<0.604.0>,killed}
** When Server state == {db,<0.2061.0>,<0.2062.0>,nil,<<"1352829624436684">>, <0.2058.0>,<0.2063.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2058.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:25.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
crasher: initial call: couch_db:init/1 pid: <0.2061.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556
neighbours:
[error_logger:info,2012-11-13T10:01:25.294,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.2479.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.2479.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.295,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2482.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:25.297,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2483.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.298,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2484.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.298,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2485.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.766,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2486.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.767,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2889.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.768,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2890.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.769,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2891.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:25.771,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2892.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.772,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2895.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.774,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_primary_services}
started: [{pid,<0.2896.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.775,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.2481.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:25.776,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2898.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:25.788,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2899.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.789,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2901.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.790,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2903.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.791,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2905.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.793,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2906.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.793,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2908.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.794,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2910.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.795,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2912.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.796,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_secondary_services}
started: [{pid,<0.2929.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.797,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,couch_server_sup}
started: [{pid,<0.2897.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:25.799,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,cb_couch_sup}
started: [{pid,<0.2480.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake,
["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:error,2012-11-13T10:01:25.800,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.2892.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.802,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.2931.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:25.799,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{10,1,25}}}, {memory, [{total,32386488}, {processes,12537488}, {processes_used,12447728}, {system,19849000}, {atom,1397745}, {atom_used,1366130}, {binary,672328}, {code,13855624}, {ets,1928984}]}, {loaded, [capi_frontend,capi_spatial,qlc,mb_map,ns_rebalancer, ns_janitor_map_recoverer,stats_collector, couch_stats_reader,ns_vbm_sup,ns_vbm_new_sup, tap_replication_manager,mc_connection, capi_ddoc_replication_srv,capi_set_view_manager,mc_binary, single_bucket_sup,janitor_agent,mc_client_binary, ns_janitor,menelaus_web_remote_clusters,lib,mochinum, capi_utils,mochiweb_mime,mochiweb_io,mb_grid,mochijson2, set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, ns_config_isasl_sync,ns_config_replica,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, 
mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set 
views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,560}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']}, {cookie,bptrojzpwfmfrqou}, {wordsize,8}, {wall_clock,312}] [ns_server:info,2012-11-13T10:01:25.826,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:info,2012-11-13T10:01:25.829,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2934.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.831,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.2935.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:25.832,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.2936.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.833,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_mnesia_sup}
started: [{pid,<0.2938.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.834,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_sup}
started: [{pid,<0.2948.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2950.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2951.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2952.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:25.839,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2953.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:27.243,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2954.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:27.246,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2986.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:27.247,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2987.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:27.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2988.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:27.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_kernel_sup}
started: [{pid,<0.2989.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:27.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mnesia_sup}
started: [{pid,<0.2949.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:27.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
application: mnesia
started_at: 'ns_1@127.0.0.1'
[ns_server:info,2012-11-13T10:01:28.524,ns_1@127.0.0.1:ns_config_sup<0.3077.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config"
[error_logger:info,2012-11-13T10:01:28.524,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,mb_mnesia_sup}
started: [{pid,<0.2939.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:info,2012-11-13T10:01:28.525,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_server_cluster_sup}
started: [{pid,<0.2937.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]
[error_logger:info,2012-11-13T10:01:28.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_config_sup}
started: [{pid,<0.3078.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]
[ns_server:info,2012-11-13T10:01:28.526,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config"
[error_logger:info,2012-11-13T10:01:28.527,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
supervisor: {local,ns_config_sup}
started: [{pid,<0.3079.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}},
{restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:28.528,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2012-11-13T10:01:28.541,ns_1@127.0.0.1:ns_config<0.3080.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {memory_quota,2391}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, 
{{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, 
{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}] [error_logger:info,2012-11-13T10:01:28.554,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3080.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:28.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3082.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:28.557,ns_1@127.0.0.1:ns_config_isasl_sync<0.3083.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. [error_logger:info,2012-11-13T10:01:29.560,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3083.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.562,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3086.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3088.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3077.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.565,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3090.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.570,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3092.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2012-11-13T10:01:29.572,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3093.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3094.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3097.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.575,ns_1@127.0.0.1:ns_node_disco_events<0.3097.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1'] [error_logger:info,2012-11-13T10:01:29.576,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3098.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.577,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3101.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3102.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.582,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3103.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3104.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3096.0>}, 
{name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:29.590,ns_1@127.0.0.1:remote_clusters_info<0.3123.0>:remote_clusters_info:read_or_create_table:384]Reading remote_clusters_info content from /Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/remote_clusters_cache [error_logger:info,2012-11-13T10:01:29.592,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3107.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.594,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3111.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.596,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3112.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:error,2012-11-13T10:01:29.597,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T10:01:29.598,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3113.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.601,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.3115.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T10:01:29.602,ns_1@127.0.0.1:mb_master<0.3130.0>:mb_master:init:89]I'm the only node, so I'm the master. 
[ns_server:info,2012-11-13T10:01:29.603,ns_1@127.0.0.1:ns_config_log<0.3086.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [error_logger:info,2012-11-13T10:01:29.605,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3114.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.608,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3116.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.611,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3117.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.617,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3120.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T10:01:29.620,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
[error_logger:info,2012-11-13T10:01:29.621,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3123.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.623,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3133.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.623,ns_1@127.0.0.1:<0.3170.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2012-11-13T10:01:29.625,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3138.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.628,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3139.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.631,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3130.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.634,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3140.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.638,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3141.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.640,ns_1@127.0.0.1:<0.3179.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2012-11-13T10:01:29.643,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3142.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2012-11-13T10:01:29.646,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3143.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.649,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3144.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.651,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3147.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.653,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3164.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.654,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3165.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.655,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3166.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.657,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3146.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.658,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3168.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.659,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3169.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] 
[error_logger:info,2012-11-13T10:01:29.661,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3170.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.662,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3167.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.664,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3172.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:error,2012-11-13T10:01:29.667,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T10:01:29.671,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3173.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:29.685,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[ns_server:info,2012-11-13T10:01:29.693,ns_1@127.0.0.1:<0.3213.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:01:29.697,ns_1@127.0.0.1:<0.3213.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [error_logger:info,2012-11-13T10:01:29.690,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3175.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.701,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3171.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, 
{shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3178.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3179.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.709,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3181.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:29.710,ns_1@127.0.0.1:set_view_update_daemon<0.3218.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2012-11-13T10:01:29.712,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3182.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.713,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3183.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.715,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3189.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.716,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3187.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.717,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3191.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.718,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3193.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3190.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.721,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3203.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.722,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3205.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.723,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3206.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.724,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3207.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:29.726,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3209.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.727,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3212.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.729,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3218.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2012-11-13T10:01:29.730,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3220.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:29.732,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3091.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:30.701,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [error_logger:info,2012-11-13T10:01:30.703,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3201.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:30.704,ns_1@127.0.0.1:janitor_agent-default<0.3237.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
[ns_server:info,2012-11-13T10:01:30.705,ns_1@127.0.0.1:janitor_agent-default<0.3237.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config [error_logger:info,2012-11-13T10:01:30.705,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3234.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.709,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3235.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:30.710,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3236.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:30.712,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3237.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.714,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3238.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.715,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3239.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.717,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3241.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:30.718,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3243.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2012-11-13T10:01:30.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3244.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:30.890,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]memcached<0.3177.0>: Tue Nov 13 10:01:30.690386 PST 3: Trying to connect to mccouch: "localhost:11213" memcached<0.3177.0>: Tue Nov 13 10:01:30.691887 PST 3: Connected to mccouch: "localhost:11213" memcached<0.3177.0>: Tue Nov 13 10:01:30.701758 PST 3: Extension support isn't implemented in this version of bucket_engine memcached<0.3177.0>: Tue Nov 13 10:01:30.708314 PST 3: Failed to load mutation log, falling back to key dump memcached<0.3177.0>: Tue Nov 13 10:01:30.714911 PST 3: metadata loaded in 21 ms memcached<0.3177.0>: Tue Nov 13 10:01:30.718107 PST 3: warmup completed in 24 ms [user:info,2012-11-13T10:01:31.184,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds. [ns_server:info,2012-11-13T10:01:31.611,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 0 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.612,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 1 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.612,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 2 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.612,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 3 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.613,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 4 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.613,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 5 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.613,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 6 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.620,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 7 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.620,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 8 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.620,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 9 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.621,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 10 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.621,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 11 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.621,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 12 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:31.621,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 13 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.622,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 14 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.622,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 15 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.622,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 16 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.622,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 17 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.622,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 18 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 19 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 20 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 21 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 22 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 23 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.623,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 24 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.624,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 25 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.624,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 26 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.624,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 27 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.624,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 28 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.624,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 29 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.625,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 30 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.625,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 31 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:31.625,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 32 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:31.625,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 33 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.625,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 34 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 35 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 36 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 37 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 38 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 39 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.626,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 40 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 41 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 42 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 43 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 44 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 45 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.627,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 46 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 47 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 48 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 49 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 50 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 51 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.628,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 52 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.629,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 53 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.629,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 54 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.629,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 55 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.629,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 56 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.629,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 57 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 58 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 59 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 60 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 61 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 62 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.630,ns_1@127.0.0.1:<0.3134.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 63 in "default" on 'ns_1@127.0.0.1' from dead to active.
[ns_server:info,2012-11-13T10:01:31.632,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active
[ns_server:info,2012-11-13T10:01:31.632,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active
[ns_server:info,2012-11-13T10:01:31.633,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active
[ns_server:info,2012-11-13T10:01:31.633,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active
[ns_server:info,2012-11-13T10:01:31.634,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active
[ns_server:info,2012-11-13T10:01:31.634,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active
[ns_server:info,2012-11-13T10:01:31.634,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active
[ns_server:info,2012-11-13T10:01:31.635,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active
[ns_server:info,2012-11-13T10:01:31.636,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active
[ns_server:info,2012-11-13T10:01:31.636,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active
[ns_server:info,2012-11-13T10:01:31.637,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active
[ns_server:info,2012-11-13T10:01:31.637,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active
[ns_server:info,2012-11-13T10:01:31.637,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active
[ns_server:info,2012-11-13T10:01:31.638,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active
[ns_server:info,2012-11-13T10:01:31.638,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active
[ns_server:info,2012-11-13T10:01:31.639,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active
[ns_server:info,2012-11-13T10:01:31.639,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active
[ns_server:info,2012-11-13T10:01:31.639,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active
[ns_server:info,2012-11-13T10:01:31.640,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active
[ns_server:info,2012-11-13T10:01:31.640,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active
[ns_server:info,2012-11-13T10:01:31.641,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active
[ns_server:info,2012-11-13T10:01:31.641,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active
[ns_server:info,2012-11-13T10:01:31.642,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active
[ns_server:info,2012-11-13T10:01:31.642,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active
[ns_server:info,2012-11-13T10:01:31.643,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active
[ns_server:info,2012-11-13T10:01:31.643,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active
[ns_server:info,2012-11-13T10:01:31.643,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active
[ns_server:info,2012-11-13T10:01:31.644,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active
[ns_server:info,2012-11-13T10:01:31.644,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active
[ns_server:info,2012-11-13T10:01:31.645,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active
[ns_server:info,2012-11-13T10:01:31.645,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active
[ns_server:info,2012-11-13T10:01:31.646,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active
[ns_server:info,2012-11-13T10:01:31.646,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active
[ns_server:info,2012-11-13T10:01:31.647,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active
[ns_server:info,2012-11-13T10:01:31.647,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active
[ns_server:info,2012-11-13T10:01:31.648,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active
[ns_server:info,2012-11-13T10:01:31.649,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active
[ns_server:info,2012-11-13T10:01:31.649,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active
[ns_server:info,2012-11-13T10:01:31.650,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active
[ns_server:info,2012-11-13T10:01:31.651,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active
[ns_server:info,2012-11-13T10:01:31.651,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active
[ns_server:info,2012-11-13T10:01:31.652,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active
[ns_server:info,2012-11-13T10:01:31.653,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active
[ns_server:info,2012-11-13T10:01:31.653,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active
[ns_server:info,2012-11-13T10:01:31.654,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active
[ns_server:info,2012-11-13T10:01:31.654,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active
[ns_server:info,2012-11-13T10:01:31.655,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active
[ns_server:info,2012-11-13T10:01:31.656,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active
[ns_server:info,2012-11-13T10:01:31.657,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active
[ns_server:info,2012-11-13T10:01:31.657,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active
[ns_server:info,2012-11-13T10:01:31.658,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active
[ns_server:info,2012-11-13T10:01:31.659,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active
[ns_server:info,2012-11-13T10:01:31.660,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active
[ns_server:info,2012-11-13T10:01:31.660,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active
[ns_server:info,2012-11-13T10:01:31.661,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active
[ns_server:info,2012-11-13T10:01:31.661,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active
[ns_server:info,2012-11-13T10:01:31.662,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active
[ns_server:info,2012-11-13T10:01:31.663,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active
[ns_server:info,2012-11-13T10:01:31.663,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active
[ns_server:info,2012-11-13T10:01:31.663,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active
[ns_server:info,2012-11-13T10:01:31.664,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active
[ns_server:info,2012-11-13T10:01:31.664,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active
[ns_server:info,2012-11-13T10:01:31.665,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active
[ns_server:info,2012-11-13T10:01:31.666,ns_1@127.0.0.1:<0.3233.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active
[ns_server:info,2012-11-13T10:01:31.666,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default"
[ns_server:info,2012-11-13T10:01:31.667,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 0 seconds
[ns_server:info,2012-11-13T10:01:34.614,ns_1@127.0.0.1:ns_doctor<0.3120.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"]
[user:info,2012-11-13T10:01:38.582,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 'ns_1@127.0.0.1' for server shutdown
[ns_server:info,2012-11-13T10:01:38.582,ns_1@127.0.0.1:ns_memcached-default<0.3201.0>:ns_memcached:terminate:672]This bucket shutdown is not due to bucket deletion. Doing nothing
[error_logger:error,2012-11-13T10:01:38.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,ns_bucket_sup}
  Context:    shutdown_error
  Reason:     normal
  Offender:   [{pid,<0.3189.0>},
               {name,buckets_observing_subscription},
               {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}},
               {restart_type,permanent},
               {shutdown,1000},
               {child_type,worker}]
[ns_server:info,2012-11-13T10:01:38.785,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]
memcached<0.3177.0>: EOL on stdin. Initiating shutdown
[ns_server:error,2012-11-13T10:01:38.786,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:error,2012-11-13T10:01:38.809,ns_1@127.0.0.1:ns_doctor<0.3120.0>:ns_doctor:update_status:205]The following buckets became not ready on node 'ns_1@127.0.0.1': ["default"], those of them are active []
[ns_server:info,2012-11-13T10:01:38.858,ns_1@127.0.0.1:<0.3229.0>:mc_connection:run_loop:202]mccouch connection was normally closed
[ns_server:info,2012-11-13T10:01:38.858,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:handle_info:104]Port server memcached exited with status 0
[ns_server:info,2012-11-13T10:01:38.859,ns_1@127.0.0.1:ns_port_memcached<0.3177.0>:ns_port_server:log:171]
memcached<0.3177.0>: Tue Nov 13 10:01:38.822590 PST 3: Shutting down tap connections!
memcached<0.3177.0>: Tue Nov 13 10:01:38.823826 PST 3: Had to wait 1154 usec for shutdown
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:<0.3174.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:<0.3174.0>:ns_port_server:log:171]
moxi<0.3174.0>: EOL on stdin. Exiting
[ns_server:info,2012-11-13T10:01:38.860,ns_1@127.0.0.1:mb_master<0.3130.0>:mb_master:terminate:288]Synchronously shutting down child mb_master_sup
[error_logger:error,2012-11-13T10:01:38.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,ns_server_cluster_sup}
  Context:    shutdown_error
  Reason:     killed
  Offender:   [{pid,<0.3090.0>},
               {name,vbucket_filter_changes_registry},
               {mfargs,{ns_process_registry,start_link,[vbucket_filter_changes_registry]}},
               {restart_type,permanent},
               {shutdown,100},
               {child_type,worker}]
[ns_server:info,2012-11-13T10:01:39.078,ns_1@127.0.0.1:mb_mnesia<0.2939.0>:mb_mnesia:terminate:277]Shut Mnesia down: shutdown. Exiting.
[error_logger:info,2012-11-13T10:01:39.078,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: mnesia
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:39.088,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_file:spawn_writer/2
    pid: <0.2489.0>
    registered_name: []
    exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2489.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4
    ancestors: [<0.2487.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 377
    stack_size: 24
    reductions: 691
  neighbours:
[error_logger:error,2012-11-13T10:01:39.089,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2754.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2754.0>,<0.2755.0>,nil,<<"1352829685736678">>, <0.2751.0>,<0.2756.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/47">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.094,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2754.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 439
  neighbours:
[error_logger:error,2012-11-13T10:01:39.095,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2682.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2682.0>,<0.2683.0>,nil,<<"1352829685721321">>, <0.2679.0>,<0.2684.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/36">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2682.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 447
  neighbours:
[error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2718.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2718.0>,<0.2719.0>,nil,<<"1352829685729409">>, <0.2715.0>,<0.2720.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/41">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.106,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2718.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 455
  neighbours:
[error_logger:error,2012-11-13T10:01:39.107,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2832.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2832.0>,<0.2833.0>,nil,<<"1352829685753066">>, <0.2829.0>,<0.2834.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/59">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.112,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2832.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 450
  neighbours:
[error_logger:error,2012-11-13T10:01:39.113,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2826.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2826.0>,<0.2827.0>,nil,<<"1352829685751685">>, <0.2823.0>,<0.2828.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/58">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.117,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2826.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 458
  neighbours:
[error_logger:error,2012-11-13T10:01:39.118,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2856.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2856.0>,<0.2857.0>,nil,<<"1352829685758407">>, <0.2853.0>,<0.2858.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2856.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 466
  neighbours:
[error_logger:error,2012-11-13T10:01:39.125,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2808.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2808.0>,<0.2809.0>,nil,<<"1352829685747757">>, <0.2805.0>,<0.2810.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/55">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.130,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2808.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 474
  neighbours:
[error_logger:error,2012-11-13T10:01:39.131,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2862.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2862.0>,<0.2863.0>,nil,<<"1352829685759790">>, <0.2859.0>,<0.2864.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2862.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 482
  neighbours:
[error_logger:error,2012-11-13T10:01:39.136,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2496.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2496.0>,<0.2497.0>,nil,<<"1352829685577250">>, <0.2493.0>,<0.2498.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2493.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2814.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2814.0>,<0.2815.0>,nil,<<"1352829685749123">>, <0.2811.0>,<0.2816.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/56">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.143,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2496.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 987
    stack_size: 24
    reductions: 309
  neighbours:
[error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2814.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 494
  neighbours:
[error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2850.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2850.0>,<0.2851.0>,nil,<<"1352829685756921">>, <0.2847.0>,<0.2852.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/61">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.148,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2796.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2796.0>,<0.2797.0>,nil,<<"1352829685745381">>, <0.2793.0>,<0.2798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/53">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.152,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2796.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 422
  neighbours:
[error_logger:error,2012-11-13T10:01:39.154,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2850.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 506
  neighbours:
[error_logger:info,2012-11-13T10:01:39.154,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: mapreduce
  exited: stopped
  type: temporary
[error_logger:info,2012-11-13T10:01:39.155,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_view_parser
  exited: stopped
  type: temporary
[error_logger:info,2012-11-13T10:01:39.155,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_index_merger
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:39.156,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2640.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2640.0>,<0.2641.0>,nil,<<"1352829685711040">>, <0.2637.0>,<0.2642.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:info,2012-11-13T10:01:39.235,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================INFO REPORT=========================
  application: couch_set_view
  exited: stopped
  type: temporary
[error_logger:error,2012-11-13T10:01:39.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2640.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 426
  neighbours:
[error_logger:error,2012-11-13T10:01:39.237,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2490.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2490.0>,<0.2491.0>,nil,<<"1352829685334923">>, <0.2487.0>,<0.2492.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2487.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.241,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2490.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 610
    stack_size: 24
    reductions: 436
  neighbours:
[error_logger:error,2012-11-13T10:01:39.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2502.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2502.0>,<0.2503.0>,nil,<<"1352829685683035">>, <0.2499.0>,<0.2504.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.245,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2502.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 554
  neighbours:
[error_logger:error,2012-11-13T10:01:39.246,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2712.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2712.0>,<0.2713.0>,nil,<<"1352829685727974">>, <0.2709.0>,<0.2714.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2712.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 575
  neighbours:
[error_logger:error,2012-11-13T10:01:39.250,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2586.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2586.0>,<0.2587.0>,nil,<<"1352829685699966">>, <0.2583.0>,<0.2588.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.253,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2586.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 570
  neighbours:
[error_logger:error,2012-11-13T10:01:39.254,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2784.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2784.0>,<0.2785.0>,nil,<<"1352829685742942">>, <0.2781.0>,<0.2786.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/51">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.257,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2784.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 578
  neighbours:
[error_logger:error,2012-11-13T10:01:39.258,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2874.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2874.0>,<0.2875.0>,nil,<<"1352829685762498">>, <0.2871.0>,<0.2876.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/8">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2874.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 586
  neighbours:
[error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2616.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2616.0>,<0.2617.0>,nil,<<"1352829685706449">>, <0.2613.0>,<0.2618.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2616.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 594
  neighbours:
[error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2634.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2634.0>,<0.2635.0>,nil,<<"1352829685709980">>, <0.2631.0>,<0.2636.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/29">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:info,2012-11-13T10:01:39.269,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_view_parser
  started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T10:01:39.270,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2634.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 615
  neighbours:
[error_logger:error,2012-11-13T10:01:39.271,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2670.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2670.0>,<0.2671.0>,nil,<<"1352829685718719">>, <0.2667.0>,<0.2672.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/34">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.274,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2670.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 627
  neighbours:
[error_logger:error,2012-11-13T10:01:39.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2610.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2610.0>,<0.2611.0>,nil,<<"1352829685705115">>, <0.2607.0>,<0.2612.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2607.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.278,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2610.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 622
  neighbours:
[error_logger:info,2012-11-13T10:01:39.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_set_view
  started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T10:01:39.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2778.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2778.0>,<0.2779.0>,nil,<<"1352829685741611">>, <0.2775.0>,<0.2780.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/50">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2778.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 422
  neighbours:
[error_logger:info,2012-11-13T10:01:39.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: couch_index_merger
  started_at: 'ns_1@127.0.0.1'
[error_logger:info,2012-11-13T10:01:39.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================PROGRESS REPORT=========================
  application: mapreduce
  started_at: 'ns_1@127.0.0.1'
[error_logger:error,2012-11-13T10:01:39.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2520.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2520.0>,<0.2521.0>,nil,<<"1352829685687483">>, <0.2517.0>,<0.2522.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/11">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2520.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 438
  neighbours:
[error_logger:error,2012-11-13T10:01:39.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2568.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2568.0>,<0.2569.0>,nil,<<"1352829685695867">>, <0.2565.0>,<0.2570.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2568.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 446
  neighbours:
[error_logger:error,2012-11-13T10:01:39.293,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2688.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2688.0>,<0.2689.0>,nil,<<"1352829685722740">>, <0.2685.0>,<0.2690.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/37">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.296,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2688.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 467
  neighbours:
[error_logger:error,2012-11-13T10:01:39.297,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2550.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2550.0>,<0.2551.0>,nil,<<"1352829685692516">>, <0.2547.0>,<0.2552.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/16">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.300,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2550.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 462
  neighbours:
[error_logger:error,2012-11-13T10:01:39.301,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2742.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2742.0>,<0.2743.0>,nil,<<"1352829685734113">>, <0.2739.0>,<0.2744.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.304,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2742.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 483
  neighbours:
[error_logger:error,2012-11-13T10:01:39.305,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2700.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2700.0>,<0.2701.0>,nil,<<"1352829685725516">>, <0.2697.0>,<0.2702.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/39">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.308,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2700.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 491
  neighbours:
[error_logger:error,2012-11-13T10:01:39.309,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2838.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2838.0>,<0.2839.0>,nil,<<"1352829685754116">>, <0.2835.0>,<0.2840.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/6">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2838.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 694
  neighbours:
[error_logger:error,2012-11-13T10:01:39.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2868.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2868.0>,<0.2869.0>,nil,<<"1352829685761171">>, <0.2865.0>,<0.2870.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.317,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_file:spawn_writer/2
    pid: <0.2885.0>
    registered_name: []
    exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2885.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4
    ancestors: [<0.2883.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 610
    stack_size: 24
    reductions: 729
  neighbours:
[error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2868.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 702
  neighbours:
[error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2532.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2532.0>,<0.2533.0>,nil,<<"1352829685689439">>, <0.2529.0>,<0.2534.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/13">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.323,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2532.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 426
  neighbours:
[error_logger:error,2012-11-13T10:01:39.324,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2544.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state ==
{db,<0.2544.0>,<0.2545.0>,nil,<<"1352829685691394">>, <0.2541.0>,<0.2546.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/15">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.328,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2544.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 434 neighbours: [error_logger:error,2012-11-13T10:01:39.329,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2766.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2766.0>,<0.2767.0>,nil,<<"1352829685738876">>, <0.2763.0>,<0.2768.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/49">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2766.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 442 neighbours: [error_logger:error,2012-11-13T10:01:39.333,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2820.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2820.0>,<0.2821.0>,nil,<<"1352829685750202">>, <0.2817.0>,<0.2822.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/57">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.336,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: 
couch_db:init/1 pid: <0.2820.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 450 neighbours: [error_logger:error,2012-11-13T10:01:39.337,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2598.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2598.0>,<0.2599.0>,nil,<<"1352829685702622">>, <0.2595.0>,<0.2600.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/23">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.340,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2598.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 458 neighbours: [error_logger:error,2012-11-13T10:01:39.341,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2628.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2628.0>,<0.2629.0>,nil,<<"1352829685708726">>, <0.2625.0>,<0.2630.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/28">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.345,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2628.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 479 neighbours: [error_logger:error,2012-11-13T10:01:39.346,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2652.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2652.0>,<0.2653.0>,nil,<<"1352829685713972">>, <0.2649.0>,<0.2654.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2649.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.458,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2652.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 474 neighbours: [error_logger:error,2012-11-13T10:01:39.459,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2646.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2646.0>,<0.2647.0>,nil,<<"1352829685712452">>, <0.2643.0>,<0.2648.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/30">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.462,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2646.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 482 neighbours: [error_logger:error,2012-11-13T10:01:39.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2622.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2622.0>,<0.2623.0>,nil,<<"1352829685707601">>, <0.2619.0>,<0.2624.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/27">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2622.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, 
cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 490 neighbours: [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2802.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2802.0>,<0.2803.0>,nil,<<"1352829685746658">>, <0.2799.0>,<0.2804.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/54">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.471,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2802.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 498 neighbours: [error_logger:error,2012-11-13T10:01:39.472,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2514.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2514.0>,<0.2515.0>,nil,<<"1352829685686322">>, <0.2511.0>,<0.2516.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2511.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/10">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.475,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2514.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 506 neighbours: [error_logger:error,2012-11-13T10:01:39.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2772.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2772.0>,<0.2773.0>,nil,<<"1352829685740221">>, <0.2769.0>,<0.2774.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/5">>, 
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.479,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2772.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 514 neighbours: [error_logger:error,2012-11-13T10:01:39.480,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2880.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2880.0>,<0.2881.0>,nil,<<"1352829685763855">>, <0.2877.0>,<0.2882.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/9">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2880.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 522 neighbours: [error_logger:error,2012-11-13T10:01:39.484,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2538.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2538.0>,<0.2539.0>,nil,<<"1352829685690418">>, <0.2535.0>,<0.2540.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2538.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 530 neighbours: 
[error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2508.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2508.0>,<0.2509.0>,nil,<<"1352829685684645">>, <0.2505.0>,<0.2510.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/1">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.492,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2508.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 538 neighbours: [error_logger:error,2012-11-13T10:01:39.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2844.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2844.0>,<0.2845.0>,nil,<<"1352829685755504">>, <0.2841.0>,<0.2846.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/60">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/60.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.496,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2844.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 546 neighbours: [error_logger:error,2012-11-13T10:01:39.497,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2562.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2562.0>,<0.2563.0>,nil,<<"1352829685694660">>, <0.2559.0>,<0.2564.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/18">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:39.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2562.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 554 neighbours: [error_logger:error,2012-11-13T10:01:39.501,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2676.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2676.0>,<0.2677.0>,nil,<<"1352829685719901">>, <0.2673.0>,<0.2678.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2676.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 575 neighbours: [error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2526.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2526.0>,<0.2527.0>,nil,<<"1352829685688455">>, <0.2523.0>,<0.2528.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.508,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2724.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2724.0>,<0.2725.0>,nil,<<"1352829685730532">>, <0.2721.0>,<0.2726.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, 
[before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.512,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2526.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 570 neighbours: [error_logger:error,2012-11-13T10:01:39.513,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2724.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 587 neighbours: [error_logger:error,2012-11-13T10:01:39.514,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2760.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2760.0>,<0.2761.0>,nil,<<"1352829685737859">>, <0.2757.0>,<0.2762.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2760.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 599 neighbours: [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2706.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2706.0>,<0.2707.0>,nil,<<"1352829685726622">>, <0.2703.0>,<0.2708.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/4">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.522,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH 
REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2706.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 594 neighbours: [error_logger:error,2012-11-13T10:01:39.523,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2694.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2694.0>,<0.2695.0>,nil,<<"1352829685723978">>, <0.2691.0>,<0.2696.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/38">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 602 neighbours: [error_logger:error,2012-11-13T10:01:39.527,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2556.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2556.0>,<0.2557.0>,nil,<<"1352829685693528">>, <0.2553.0>,<0.2558.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/17">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.530,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2556.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 610 neighbours: [error_logger:error,2012-11-13T10:01:39.531,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2748.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2748.0>,<0.2749.0>,nil,<<"1352829685735209">>, <0.2745.0>,<0.2750.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, 
{btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/46">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.535,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2748.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 631 neighbours: [error_logger:error,2012-11-13T10:01:39.536,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2574.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2574.0>,<0.2575.0>,nil,<<"1352829685697361">>, <0.2571.0>,<0.2576.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.539,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2736.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2736.0>,<0.2737.0>,nil,<<"1352829685732765">>, <0.2733.0>,<0.2738.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.542,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2574.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 626 neighbours: [error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2736.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 643 neighbours: [error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2790.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2790.0>,<0.2791.0>,nil,<<"1352829685744125">>, <0.2787.0>,<0.2792.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/52">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2790.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 642 neighbours: [error_logger:error,2012-11-13T10:01:39.549,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2886.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2886.0>,<0.2887.0>,nil,<<"1352829685765663">>, <0.2883.0>,<0.2888.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.551,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2580.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2580.0>,<0.2581.0>,nil,<<"1352829685698636">>, <0.2577.0>,<0.2582.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2886.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 538 neighbours: [error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2580.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 654 neighbours: [error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2592.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2592.0>,<0.2593.0>,nil,<<"1352829685701266">>, <0.2589.0>,<0.2594.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/22">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.560,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2658.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2658.0>,<0.2659.0>,nil,<<"1352829685715431">>, <0.2655.0>,<0.2660.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2655.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2658.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 670 neighbours: [error_logger:error,2012-11-13T10:01:39.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2664.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2664.0>,<0.2665.0>,nil,<<"1352829685717309">>, <0.2661.0>,<0.2666.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>, {12334,<<>>,93}, #Fun, #Fun, 
#Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2664.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 678 neighbours: [error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2604.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2604.0>,<0.2605.0>,nil,<<"1352829685703745">>, <0.2601.0>,<0.2606.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/24">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2604.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 686 neighbours: [error_logger:error,2012-11-13T10:01:39.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2592.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 666 neighbours: [error_logger:error,2012-11-13T10:01:39.575,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2730.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2730.0>,<0.2731.0>,nil,<<"1352829685731704">>, <0.2727.0>,<0.2732.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/43">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:39.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2730.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 711 neighbours: [error_logger:info,2012-11-13T10:01:39.581,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3300.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/capi.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/default.d/geocouch.ini", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchdb/local.ini", "/Users/farshid/Library/Preferences/couchbase-server.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-platform.ini", "/Users/farshid/Library/Application Support/Couchbase/etc/couch-custom.ini"], <0.3300.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.583,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3303.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.584,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3304.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3305.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.586,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3306.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] 
[error_logger:info,2012-11-13T10:01:39.813,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3307.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.814,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3711.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.816,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3712.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.817,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3713.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.818,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3714.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.819,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3717.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.821,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3718.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.822,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3302.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.824,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,couch_secondary_services} started: [{pid,<0.3720.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.825,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3721.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.826,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3723.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.827,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3725.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.828,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3727.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.830,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3728.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.831,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3730.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.832,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3732.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.833,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3734.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
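Among the couch_secondary_services children above, the index_merger_pool entry is informative: its mfargs show lhttpc_manager being started with an explicit connection pool configuration. Rewriting that exact call as a standalone function (the options below are copied verbatim from the report; only the function name is invented):

    %% Options taken verbatim from the index_merger_pool mfargs above.
    start_index_merger_pool() ->
        lhttpc_manager:start_link(
          [{connection_timeout, 90000},                  %% ms an idle socket is kept
           {pool_size, 10000},                           %% max pooled connections
           {name, couch_index_merger_connection_pool}]). %% registered pool name

So the index merger keeps up to 10000 HTTP connections alive for 90 seconds of idle time each, under the registered name couch_index_merger_connection_pool.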
[error_logger:info,2012-11-13T10:01:39.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.3751.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.3719.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:39.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.3301.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:39.839,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:25]OS type: {unix,darwin} Version: {12,2,0} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:4:4] [rq:4] [async-threads:16] [kernel-poll:false]\n"}, {system_arch_raw,"i386-apple-darwin11.4.0"}, {system_arch,"i386-apple-darwin11.4.0"}, {localtime,{{2012,11,13},{10,1,39}}}, {memory, [{total,31876640}, {processes,12139136}, {processes_used,11985520}, {system,19737504}, {atom,1397745}, {atom_used,1366130}, {binary,560080}, {code,13855624}, {ets,1928712}]}, {loaded, [capi_frontend,capi_spatial,qlc,mb_map,ns_rebalancer, ns_janitor_map_recoverer,stats_collector, couch_stats_reader,ns_vbm_sup,ns_vbm_new_sup, tap_replication_manager,mc_connection, capi_ddoc_replication_srv,capi_set_view_manager,mc_binary, single_bucket_sup,janitor_agent,mc_client_binary, ns_janitor,menelaus_web_remote_clusters,lib,mochinum, capi_utils,mochiweb_mime,mochiweb_io,mb_grid,mochijson2, set_view_update_daemon,mochiweb_response, xdc_rdoc_replication_srv,menelaus_web_buckets, compaction_daemon,menelaus_auth,stats_archiver, mochiweb_util,couch_httpd_view,system_stats_collector, mochiweb_request,couch_changes,ns_bucket_sup, mochiweb_headers,mochiweb,couch_set_view_util, geocouch_duplicates,xdc_rep_manager,ns_cluster_membership, ns_memcached_log_rotator,ns_port_server, supervisor_cushion,ns_port_init,ns_moxi_sup,ns_port_sup, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_util,menelaus_deps,menelaus_web,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,failover_safeness_level, mb_master_sup,cluster_compat_mode,gen_fsm, samples_loader_tasks,mb_master,xdc_replication_sup, remote_clusters_info,ns_bucket,ns_doctor,stats_reader, ns_heart,ns_mail_log,ns_mail_sup,work_queue, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, ns_node_disco,ns_node_disco_sup,ns_config_ets_dup,random, ns_log,ns_server_sup,ns_process_registry, cb_config_couch_sync,ns_config_log,ns_memcached,ns_pubsub, 
ns_config_isasl_sync,ns_config_replica,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, mnesia_index,mnesia_loader,file_sorter,dets_v9,dets_utils, dets_sup,dets_server,dets,mnesia_log,mnesia_snmp_hook, mnesia_checkpoint,mnesia_late_loader,mnesia_dumper, mnesia_snmp_sup,mnesia_checkpoint_sup,mnesia_frag, mnesia_tm,mnesia_recover,mnesia_sp,mnesia_locker, mnesia_event,mnesia_kernel_sup,mnesia_sup,mnesia_bup, mnesia_schema,mnesia_controller,mnesia_lib,mnesia_monitor, mnesia_subscr,mnesia,mb_mnesia,mb_mnesia_sup,ns_cluster, ns_cookie_manager,erl_epmd,inet_tcp_dist,gen_udp, inet_gethost_native,dist_manager,timeout_diag_logger, path_config,diag_handler,auth,ns_info,log_os_info, couch_config_writer,cb_init_loggers,mochiweb_acceptor, inet_tcp,gen_tcp,mochiweb_socket,mochiweb_socket_server, mochilists,mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view,snappy, couch_compress,couch_spatial_validation, couch_set_view_mapreduce,ejson,couch_doc, couch_db_update_notifier,couch_btree,couch_ref_counter, couch_uuids,couch_db_updater,couch_db,couch_auth_cache, couch_db_update_notifier_sup,couch_secondary_sup, couch_index_barrier,couch_event_sup,couch_log, couch_rep_sup,httpd_util,filelib,couch_file, couch_file_write_guard,couch_task_status,erl_ddll, couch_drv,couch_primary_sup,couch_server,string,re,file2, couch_util,couch_config,couch_server_sup,mochiweb_sup, mochiweb_app,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, ale_default_formatter,ale_stderr_sink,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views','ale_logger-cluster', 'ale_logger-rebalance','ale_logger-stats', 'ale_logger-ns_doctor','ale_logger-menelaus', 'ale_logger-user','ale_logger-ns_server', 'ale_logger-couchdb',ns_log_sink,disk_log_sup, disk_log_server,disk_log_1,disk_log,timer,ale_disk_sink, io_lib_fread,ns_server,cpu_sup,memsup,disksup,os_mon, sasl_report,release_handler,calendar,overload, alarm_handler,log_mf_h,sasl_report_tty_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal, compile,dynamic_compile,ale_utils,io_lib_pretty, io_lib_format,ale_codegen,io_lib,ale,io,ale_dynamic_sup, sets,ale_sup,dict,ale_app,ordsets,erl_lint,ram_file, beam_lib,ns_bootstrap,file_io_server,orddict,erl_eval, file,c,error_logger_tty_h,kernel_config,queue,shell,user, user_drv,user_sup,supervisor_bridge,standard_error, unicode,binary,ets,gb_sets,hipe_unified_loader,packages, code_server,code,file_server,net_kernel,global_group, erl_distribution,filename,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, 
erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8cffe61"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-00105ea-git"}, {mnesia,"MNESIA CXC 138 12","4.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-00105ea-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-00105ea-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.0.0-1949-rel-community"}, {mochiweb,"MochiMedia Web Server","1.4.1"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,560}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, ['sink-disk_xdcr','sink-disk_debug',ns_server_cluster_sup, 'sink-disk_couchdb','sink-disk_mapreduce_errors', couch_auth_cache,'sink-disk_views',erl_epmd, 'sink-disk_error',disk_log_sup,disk_log_server, code_server,application_controller,error_logger, couch_set_view,ale_sup,lhttpc_sup,ale_dynamic_sup, mochiweb_sup,auth,standard_error_sup,os_cmd_port_creator, kernel_safe_sup,lhttpc_manager,tftp_sup, couch_set_view_ddoc_cache,os_mon_sup, couch_index_merger_connection_pool,cpu_sup,couch_spatial, memsup,disksup,timer_server,couch_replica_index_barrier, couch_main_index_barrier,net_kernel,couch_replication, dist_manager,couch_task_events,rex,net_sup,couch_log, kernel_sup,global_name_server,file_server_2,cb_couch_sup, httpd_sup,ssl_connection_sup,'sink-disk_default', ssl_manager,ssl_broker_sup,ssl_server,sasl_safe_sup, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,ftp_sup,inets_sup,crypto_server, crypto_sup,sasl_sup,couch_secondary_services, couch_primary_services,couch_db_update, inet_gethost_native_sup,release_handler,couch_view, couch_uuids,overload,couch_task_status,alarm_handler, couch_server_sup,couch_server,dets_sup,dets,'sink-stderr', erl_prim_loader,couch_rep_sup,couch_query_servers, standard_error,init,couch_httpd,couch_file_write_guard, inet_gethost_native,couch_drv,inet_db, couch_db_update_notifier_sup,user,'sink-ns_log', couch_config,global_group,'sink-disk_stats', 'sink-disk_xdcr_errors']}, {cookie,bptrojzpwfmfrqou}, {wordsize,8}, {wall_clock,326}] [ns_server:info,2012-11-13T10:01:39.851,ns_1@127.0.0.1:ns_server_cluster_sup<0.196.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ",""] [error_logger:error,2012-11-13T10:01:39.860,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.3714.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_cluster_sup} started: [{pid,<0.3754.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.865,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3755.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3756.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.867,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.3758.0>}, {name,mb_mnesia_events}, {mfargs, {gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.868,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.3753.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.869,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.3770.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.870,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3772.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.872,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3773.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.873,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3774.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:39.874,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3775.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, 
{shutdown,180000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3776.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.588,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3798.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.589,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3799.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.590,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3800.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.592,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_kernel_sup} started: [{pid,<0.3801.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.593,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mnesia_sup} started: [{pid,<0.3771.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.594,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= application: mnesia started_at: 'ns_1@127.0.0.1' [ns_server:info,2012-11-13T10:01:40.834,ns_1@127.0.0.1:ns_config_sup<0.3873.0>:ns_config_sup:init:32]loading static ns_config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:40.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_mnesia_sup} started: [{pid,<0.3759.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.836,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3757.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, 
{shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:40.837,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3874.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:40.838,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:674]Loading static config from "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config" [error_logger:info,2012-11-13T10:01:40.838,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3875.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:40.839,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:688]Loading dynamic config from "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2012-11-13T10:01:40.851,ns_1@127.0.0.1:ns_config<0.3876.0>:ns_config:load_config:706]Here's full dynamic config we loaded + static & default config: [{auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {enabled,false}, {timeout,30}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets, [{'_vclock',[{'ns_1@127.0.0.1',{3,63520048819}}]}, {configs, [{"default", [{uuid,<<"6b60cd8faa90ee793de6eca507b18b63">>}, {sasl_password,[]}, {num_replicas,1}, {replica_index,false}, {ram_quota,2507145216}, {auth_type,sasl}, {flush_enabled,false}, {type,membase}, {num_vbuckets,64}, {servers,['ns_1@127.0.0.1']}, {map, [['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]]}]}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048581}}]},2,0]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{4,63520048581}}]},2,0]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]},{pass,[]},{host,"localhost"},{port,25},{encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,2391}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048580}}]}, {cookie,bptrojzpwfmfrqou}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {settings, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048873}}]}, {stats,[{send_stats,false}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048589}}]}| <<"b34a9c2e03786d913446a4e84919e1d5">>]}, {vbucket_map_history, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048819}}]}, {[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{max_slaves,10}]}]}, {xdcr_capi_checkpoint_timeout,10}, {xdcr_checkpoint_interval,1800}, {xdcr_connection_timeout,60}, {xdcr_doc_batch_size_kb,512}, {xdcr_failure_restart_interval,30}, {xdcr_num_http_connections,20}, {xdcr_num_retries_per_request,2}, {xdcr_num_worker_process,4}, {xdcr_worker_batch_size,100}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{5,63520048579}}]}|{2,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'_',{1,0}},{'ns_1@127.0.0.1',{2,63520048579}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,10}, {log_cyclesize,104857600}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63520048579}}]}, {filename, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{2,63520048579}}]}, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", 
"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", {"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}] [error_logger:info,2012-11-13T10:01:40.864,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3876.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:40.865,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3878.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:40.866,ns_1@127.0.0.1:ns_config_isasl_sync<0.3879.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[error_logger:info,2012-11-13T10:01:41.867,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3879.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.868,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3882.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.3884.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.872,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3873.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:41.873,ns_1@127.0.0.1:ns_node_disco_events<0.3893.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@127.0.0.1'] [error_logger:info,2012-11-13T10:01:41.873,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3886.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.875,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3888.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3889.0>}, {name,ns_config_ets_dup}, {mfargs,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.878,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3890.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3893.0>}, {name,ns_node_disco_events}, 
{mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.881,ns_1@127.0.0.1:remote_clusters_info<0.3919.0>:remote_clusters_info:read_or_create_table:384]Reading remote_clusters_info content from /Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/remote_clusters_cache [error_logger:info,2012-11-13T10:01:41.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3894.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:error,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:info,2012-11-13T10:01:41.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3897.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3898.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [user:info,2012-11-13T10:01:41.889,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:init:89]I'm the only node, so I'm the master. 
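Two things in the block above are worth flagging. First, mb_master's "I'm the only node, so I'm the master." is normal for a single-node install (nodes_wanted is just 'ns_1@127.0.0.1'). Second, the ns_heart error is a benign startup race: it calls gen_server:call(samples_loader_tasks, get_tasks, 2000) before that server has been registered; samples_loader_tasks is started under ns_server_sup only later in these progress reports. A hedged sketch of a call wrapper that tolerates such races (hypothetical helper, not the actual ns_heart code):

    safe_call(Server, Msg, Timeout) ->
        try
            {ok, gen_server:call(Server, Msg, Timeout)}
        catch
            exit:{noproc, _}  -> {error, not_started};  %% callee not registered yet
            exit:{timeout, _} -> {error, timeout}       %% callee too slow to reply
        end.

The noproc exit reason in the log ({noproc,{gen_server,call,[samples_loader_tasks,get_tasks,2000]}}) is exactly what the first catch clause would absorb.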
[error_logger:info,2012-11-13T10:01:41.890,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3899.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.891,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [error_logger:info,2012-11-13T10:01:41.892,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.3900.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.894,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3892.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [user:info,2012-11-13T10:01:41.897,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [error_logger:info,2012-11-13T10:01:41.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3903.0>}, {name,vbucket_map_mirror}, {mfargs,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.899,ns_1@127.0.0.1:<0.3966.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2012-11-13T10:01:41.902,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3907.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3908.0>}, {name,mb_master_events}, {mfargs, {gen_event,start_link,[{local,mb_master_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.908,ns_1@127.0.0.1:<0.3974.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2012-11-13T10:01:41.912,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3909.0>}, {name,buckets_events}, {mfargs, {gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.3911.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.923,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3910.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:41.925,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3912.0>}, {name,ns_stats_event}, {mfargs, {gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.931,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3913.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.933,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3916.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.936,ns_1@127.0.0.1:set_view_update_daemon<0.4007.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2012-11-13T10:01:41.936,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3919.0>}, {name,remote_clusters_info}, {mfargs,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2012-11-13T10:01:41.938,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:connect:1103]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
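The set_view_update_daemon line above echoes the {set_view_update_daemon,...} tuple from the config dump earlier: wake every 5000 ms, and only trigger an index update once at least 5000 changes are pending. A minimal gen_server skeleton with those settings (a hypothetical sketch, not the actual set_view_update_daemon; the real scan-and-update work is elided):

    -module(view_update_daemon_sketch).
    -behaviour(gen_server).
    -export([start_link/0, init/1, handle_call/3, handle_cast/2,
             handle_info/2, terminate/2, code_change/3]).

    start_link() -> gen_server:start_link(?MODULE, [], []).

    init([]) ->
        Interval   = 5000,   %% update_interval from the config, in ms
        MinChanges = 5000,   %% update_min_changes from the config
        erlang:send_after(Interval, self(), check),
        {ok, {Interval, MinChanges}}.

    handle_info(check, {Interval, _MinChanges} = State) ->
        %% Real code would scan each set view group here and kick an
        %% updater whenever its pending change count reaches MinChanges.
        erlang:send_after(Interval, self(), check),
        {noreply, State}.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State)        -> {noreply, State}.
    terminate(_Reason, _State)      -> ok.
    code_change(_Old, State, _Ex)   -> {ok, State}.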
[error_logger:info,2012-11-13T10:01:41.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3929.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.934,ns_1@127.0.0.1:<0.4005.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [error_logger:info,2012-11-13T10:01:41.946,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3934.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.948,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.3935.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.949,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3926.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:41.951,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3936.0>}, {name,master_activity_events}, {mfargs, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3937.0>}, {name,master_activity_events_ingress}, {mfargs, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.955,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3938.0>}, {name,master_activity_events_timestamper}, {mfargs, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.957,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3939.0>}, {name,master_activity_events_pids_watcher}, {mfargs, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:41.961,ns_1@127.0.0.1:<0.4005.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, 
{view_fragmentation_threshold,{30,undefined}}] [error_logger:info,2012-11-13T10:01:41.961,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3940.0>}, {name,master_activity_events_keeper}, {mfargs,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3943.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3960.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.971,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3961.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.973,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.3962.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:41.986,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3942.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.009,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3964.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.013,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3965.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.018,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.3966.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
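The compaction_daemon messages above show bucket `default` being compacted under the autocompaction settings from the config: a 30% fragmentation threshold for both databases and views (the `undefined` slot in {30,undefined} is the optional absolute-size trigger, unset here). Fragmentation is conventionally the share of the on-disk file not occupied by live data; a hedged sketch of that 30% check (hypothetical helper, not the actual compaction_daemon code):

    %% FileSize/DataSize in bytes; ThresholdPct is 30 in this config.
    needs_compaction(FileSize, DataSize, ThresholdPct) when FileSize > 0 ->
        FragPct = (FileSize - DataSize) * 100 div FileSize,
        FragPct >= ThresholdPct;
    needs_compaction(_FileSize, _DataSize, _ThresholdPct) ->
        false.

With check_interval set to 30 in the compaction_daemon config, a check along these lines would run every 30 seconds against files larger than min_file_size (131072 bytes).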
[error_logger:info,2012-11-13T10:01:42.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3963.0>}, {name,mc_sup}, {mfargs,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3968.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.025,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3969.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.036,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.3971.0>}, {name, {memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000","-p", "11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase 
Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {erlang,apply, [#Fun, [memcached, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/memcached", ["-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/stdin_term_handler.so", "-X", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/file_logger.so,cyclesize=104857600;sleeptime=19;filename=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/logs/memcached.log", "-l","0.0.0.0:11210,0.0.0.0:11209:1000", "-p","11210","-E", "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchbase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.039,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3967.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.041,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3973.0>}, {name,ns_port_memcached_killer}, {mfargs,{ns_port_sup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.042,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3974.0>}, {name,ns_memcached_log_rotator}, {mfargs,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.043,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3977.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.044,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.3979.0>}, {name,xdc_replication_sup}, {mfargs,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.045,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3980.0>}, {name,xdc_rep_manager}, {mfargs,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.046,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3984.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.048,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3983.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.049,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.3986.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.050,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3985.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.052,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3994.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.053,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3996.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.055,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.3997.0>}, {name,ns_moxi_sup_work_queue}, {mfargs, {work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.056,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.3998.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.057,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.3988.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.059,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.4000.0>}, {name,compaction_daemon}, {mfargs, {supervisor_cushion,start_link, [compaction_daemon,3000,1000,compaction_daemon, start_link,[]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.060,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.4004.0>}, {name,xdc_rdoc_replication_srv}, {mfargs,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.066,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.4007.0>}, {name,set_view_update_daemon}, {mfargs,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.071,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.4011.0>}, {name,samples_loader_tasks}, {mfargs,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.073,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.3887.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:42.961,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:ensure_bucket:1119]Created bucket "default" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=2507145216;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [error_logger:info,2012-11-13T10:01:42.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4001.0>}, {name,{ns_memcached,"default"}}, 
{mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.966,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4024.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.967,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4027.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:01:42.968,ns_1@127.0.0.1:janitor_agent-default<0.4029.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. [ns_server:info,2012-11-13T10:01:42.969,ns_1@127.0.0.1:janitor_agent-default<0.4029.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config [error_logger:info,2012-11-13T10:01:42.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4028.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:01:42.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4029.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.979,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4030.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.981,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4031.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4033.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.984,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS 
REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4035.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:01:42.985,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.4036.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2012-11-13T10:01:43.151,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:01:42.951086 PST 3: Trying to connect to mccouch: "localhost:11213" memcached<0.3972.0>: Tue Nov 13 10:01:42.952552 PST 3: Connected to mccouch: "localhost:11213" memcached<0.3972.0>: Tue Nov 13 10:01:42.960960 PST 3: Extension support isn't implemented in this version of bucket_engine memcached<0.3972.0>: Tue Nov 13 10:01:42.965552 PST 3: Failed to load mutation log, falling back to key dump memcached<0.3972.0>: Tue Nov 13 10:01:42.972721 PST 3: metadata loaded in 18 ms memcached<0.3972.0>: Tue Nov 13 10:01:42.976599 PST 3: warmup completed in 21 ms [user:info,2012-11-13T10:01:43.433,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_cast:581]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds. [ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 0 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 1 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.895,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 2 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 3 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 4 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.896,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 5 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 6 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 7 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.897,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 8 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.903,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 9 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.903,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 10 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 11 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 12 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 13 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 14 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.904,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 15 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 16 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 17 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 18 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 19 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.905,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 20 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 21 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 22 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 23 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 24 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 25 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.906,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 26 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 27 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 28 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 29 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.907,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 30 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 31 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 32 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 33 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 34 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.908,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 35 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 36 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 37 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 38 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 39 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.909,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 40 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 41 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 42 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 43 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 44 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.910,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 45 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 46 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 47 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 48 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 49 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 50 in "default" on 'ns_1@127.0.0.1' from dead to active. 
[ns_server:info,2012-11-13T10:01:43.911,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 51 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 52 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 53 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 54 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 55 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.912,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 56 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 57 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 58 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.913,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 59 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 60 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 61 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 62 in "default" on 'ns_1@127.0.0.1' from dead to active. [ns_server:info,2012-11-13T10:01:43.914,ns_1@127.0.0.1:<0.3930.0>:ns_janitor:do_sanify_chain:258]Setting vbucket 63 in "default" on 'ns_1@127.0.0.1' from dead to active. 
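The janitor pass above walks every vbucket in the "default" bucket's map (64 of them, 0 through 63) and promotes each from dead to active on the sole node. As a rough illustration only — the real logic lives in ns_janitor/janitor_agent, and SetState below is an assumed stand-in fun, not the actual API — the loop amounts to:

    -module(janitor_sketch).
    -export([activate_all/2]).

    %% Illustrative sketch: SetState is an assumed fun(Bucket, VBucket, State)
    %% standing in for the real per-vbucket state-change call that produces
    %% the "Setting vbucket N ... from dead to active" lines above.
    activate_all(Bucket, SetState) ->
        NumVBuckets = 64,  % this node runs with 64 vbuckets, 0..63
        lists:foreach(fun(VB) -> SetState(Bucket, VB, active) end,
                      lists:seq(0, NumVBuckets - 1)).

The per-vbucket acknowledgements from memcached ("Changed vbucket N state to active") follow immediately below.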
[ns_server:info,2012-11-13T10:01:43.916,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active [ns_server:info,2012-11-13T10:01:43.917,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active [ns_server:info,2012-11-13T10:01:43.917,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active [ns_server:info,2012-11-13T10:01:43.918,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active [ns_server:info,2012-11-13T10:01:43.918,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active [ns_server:info,2012-11-13T10:01:43.919,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active [ns_server:info,2012-11-13T10:01:43.920,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active [ns_server:info,2012-11-13T10:01:43.920,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active [ns_server:info,2012-11-13T10:01:43.921,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active [ns_server:info,2012-11-13T10:01:43.921,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active [ns_server:info,2012-11-13T10:01:43.922,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active [ns_server:info,2012-11-13T10:01:43.922,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active [ns_server:info,2012-11-13T10:01:43.923,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active [ns_server:info,2012-11-13T10:01:43.924,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active [ns_server:info,2012-11-13T10:01:43.924,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active [ns_server:info,2012-11-13T10:01:43.925,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active [ns_server:info,2012-11-13T10:01:43.925,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active [ns_server:info,2012-11-13T10:01:43.926,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active [ns_server:info,2012-11-13T10:01:43.927,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active [ns_server:info,2012-11-13T10:01:43.927,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active [ns_server:info,2012-11-13T10:01:43.928,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active [ns_server:info,2012-11-13T10:01:43.929,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active 
[ns_server:info,2012-11-13T10:01:43.930,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active [ns_server:info,2012-11-13T10:01:43.931,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active [ns_server:info,2012-11-13T10:01:43.932,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active [ns_server:info,2012-11-13T10:01:43.933,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active [ns_server:info,2012-11-13T10:01:43.934,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active [ns_server:info,2012-11-13T10:01:43.935,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active [ns_server:info,2012-11-13T10:01:43.936,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active [ns_server:info,2012-11-13T10:01:43.936,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active [ns_server:info,2012-11-13T10:01:43.937,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active [ns_server:info,2012-11-13T10:01:43.937,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active [ns_server:info,2012-11-13T10:01:43.938,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active [ns_server:info,2012-11-13T10:01:43.939,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active [ns_server:info,2012-11-13T10:01:43.940,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active [ns_server:info,2012-11-13T10:01:43.941,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active [ns_server:info,2012-11-13T10:01:43.941,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active [ns_server:info,2012-11-13T10:01:43.942,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active [ns_server:info,2012-11-13T10:01:43.943,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active [ns_server:info,2012-11-13T10:01:43.943,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active [ns_server:info,2012-11-13T10:01:43.945,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active [ns_server:info,2012-11-13T10:01:43.946,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active [ns_server:info,2012-11-13T10:01:43.947,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active [ns_server:info,2012-11-13T10:01:43.948,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active [ns_server:info,2012-11-13T10:01:43.949,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active [ns_server:info,2012-11-13T10:01:43.949,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active [ns_server:info,2012-11-13T10:01:43.950,ns_1@127.0.0.1:<0.4021.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active [ns_server:info,2012-11-13T10:01:43.952,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active 
[ns_server:info,2012-11-13T10:01:43.953,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active [ns_server:info,2012-11-13T10:01:43.954,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active [ns_server:info,2012-11-13T10:01:43.954,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active [ns_server:info,2012-11-13T10:01:43.956,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active [ns_server:info,2012-11-13T10:01:43.957,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active [ns_server:info,2012-11-13T10:01:43.958,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active [ns_server:info,2012-11-13T10:01:43.959,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active [ns_server:info,2012-11-13T10:01:43.960,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active [ns_server:info,2012-11-13T10:01:43.960,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active [ns_server:info,2012-11-13T10:01:43.962,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active [ns_server:info,2012-11-13T10:01:43.963,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active [ns_server:info,2012-11-13T10:01:43.964,ns_1@127.0.0.1:<0.4023.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active [ns_server:info,2012-11-13T10:01:43.965,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "default" [ns_server:info,2012-11-13T10:01:43.966,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_call:248]Bucket "default" marked as warmed in 1 seconds [ns_server:info,2012-11-13T10:01:46.906,ns_1@127.0.0.1:ns_doctor<0.3916.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"] [ns_server:info,2012-11-13T10:02:11.016,ns_1@127.0.0.1:<0.4165.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:02:11.017,ns_1@127.0.0.1:<0.4165.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:warn,2012-11-13T10:02:50.917,ns_1@127.0.0.1:menelaus_web_alerts_srv<0.3962.0>:menelaus_web_alerts_srv:handle_info:172]Eaten 5 previously unconsumed check_alerts [ns_server:warn,2012-11-13T10:02:50.918,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 7 heartbeats [stats:warn,2012-11-13T10:02:50.920,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 16 ticks [ns_server:info,2012-11-13T10:02:50.922,ns_1@127.0.0.1:<0.4290.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:warn,2012-11-13T10:03:12.532,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 1 heartbeats [ns_server:info,2012-11-13T10:03:12.532,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>} [ns_server:error,2012-11-13T10:03:12.532,ns_1@127.0.0.1:<0.4284.0>:ns_janitor:cleanup_with_states:92]Bucket "default" not yet ready on ['ns_1@127.0.0.1'] 
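The cluster of warnings here ("Eaten 5 previously unconsumed check_alerts", "Skipped N heartbeats", "lost N ticks", and the ensure_bucket call reported just below as taking 24090341 us, i.e. roughly 24 seconds) all point at the same event: the Erlang VM was starved or the host was suspended, so periodic timers fired late and the janitor briefly found the bucket "not yet ready". The bookkeeping behind a "lost N ticks" warning can be sketched as follows (an assumption about the idea, not the actual system_stats_collector code):

    %% Hedged sketch: given the last tick time, the current time and the
    %% expected interval (all in milliseconds), count whole missed intervals.
    lost_ticks(LastTickMs, NowMs, IntervalMs) ->
        max(0, (NowMs - LastTickMs) div IntervalMs - 1).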
[stats:warn,2012-11-13T10:03:12.533,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 15 ticks [ns_server:info,2012-11-13T10:03:12.533,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>} [ns_server:warn,2012-11-13T10:03:12.533,ns_1@127.0.0.1:mb_master<0.3926.0>:mb_master:handle_info:218]Skipped 8 heartbeats [ns_server:info,2012-11-13T10:03:12.533,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:handle_info:282]Skipping janitor in state janitor_running: {janitor_state, ["default"], <0.4284.0>} [stats:warn,2012-11-13T10:03:14.782,ns_1@127.0.0.1:system_stats_collector<0.3985.0>:system_stats_collector:handle_info:133]lost 6 ticks [ns_server:error,2012-11-13T10:03:15.007,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_info:630]handle_info(ensure_bucket,..) took too long: 24090341 us [ns_server:info,2012-11-13T10:03:15.407,ns_1@127.0.0.1:<0.4290.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [stats:warn,2012-11-13T10:03:15.410,ns_1@127.0.0.1:<0.4031.0>:stats_collector:latest_tick:223]Dropped 40 ticks [ns_server:info,2012-11-13T10:03:20.418,ns_1@127.0.0.1:<0.4336.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:03:20.419,ns_1@127.0.0.1:<0.4336.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:03:50.431,ns_1@127.0.0.1:<0.4469.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:03:50.432,ns_1@127.0.0.1:<0.4469.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:04:20.444,ns_1@127.0.0.1:<0.4605.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:04:20.445,ns_1@127.0.0.1:<0.4605.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:04:50.456,ns_1@127.0.0.1:<0.4759.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:04:50.457,ns_1@127.0.0.1:<0.4759.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:05:20.468,ns_1@127.0.0.1:<0.4895.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:05:20.469,ns_1@127.0.0.1:<0.4895.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:05:50.479,ns_1@127.0.0.1:<0.5028.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` 
[ns_server:info,2012-11-13T10:05:50.481,ns_1@127.0.0.1:<0.5028.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:06:20.494,ns_1@127.0.0.1:<0.5164.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:06:20.495,ns_1@127.0.0.1:<0.5164.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:06:50.508,ns_1@127.0.0.1:<0.5297.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:06:50.509,ns_1@127.0.0.1:<0.5297.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:07:20.522,ns_1@127.0.0.1:<0.5433.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:07:20.524,ns_1@127.0.0.1:<0.5433.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:07:50.536,ns_1@127.0.0.1:<0.5587.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:07:50.537,ns_1@127.0.0.1:<0.5587.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:08:20.548,ns_1@127.0.0.1:<0.5723.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:08:20.549,ns_1@127.0.0.1:<0.5723.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:08:50.563,ns_1@127.0.0.1:<0.5856.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:08:50.565,ns_1@127.0.0.1:<0.5856.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:09:20.576,ns_1@127.0.0.1:<0.5992.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:09:20.578,ns_1@127.0.0.1:<0.5992.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:09:50.588,ns_1@127.0.0.1:<0.6125.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:09:50.589,ns_1@127.0.0.1:<0.6125.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] 
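From 10:02:11 onward the compaction daemon wakes on a 30-second cadence, cleans up indexes, and re-evaluates the same autocompaction config each time: compact when fragmentation reaches 30%, with no absolute-size trigger set ({30,undefined}). A minimal sketch of that threshold test, assuming the tuple is {Percentage, MinSizeBytes}:

    %% Hedged sketch of the check implied by
    %% {database_fragmentation_threshold,{30,undefined}}: the first element
    %% is a percentage trigger, the second an (unset) absolute-size trigger.
    needs_compaction(FragPercent, _FragBytes, {Percent, undefined}) ->
        FragPercent >= Percent;
    needs_compaction(FragPercent, FragBytes, {Percent, MinBytes}) ->
        FragPercent >= Percent orelse FragBytes >= MinBytes.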
[ns_server:info,2012-11-13T10:10:20.601,ns_1@127.0.0.1:<0.6261.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:10:20.602,ns_1@127.0.0.1:<0.6261.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:10:50.614,ns_1@127.0.0.1:<0.6419.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:10:50.616,ns_1@127.0.0.1:<0.6419.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:11:20.628,ns_1@127.0.0.1:<0.6555.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:11:20.629,ns_1@127.0.0.1:<0.6555.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:11:50.642,ns_1@127.0.0.1:<0.6688.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:11:50.643,ns_1@127.0.0.1:<0.6688.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:12:20.654,ns_1@127.0.0.1:<0.6824.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:12:20.655,ns_1@127.0.0.1:<0.6824.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:12:50.666,ns_1@127.0.0.1:<0.6957.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:12:50.667,ns_1@127.0.0.1:<0.6957.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:13:20.677,ns_1@127.0.0.1:<0.7093.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:13:20.679,ns_1@127.0.0.1:<0.7093.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:13:50.690,ns_1@127.0.0.1:<0.7247.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:13:50.691,ns_1@127.0.0.1:<0.7247.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:14:20.704,ns_1@127.0.0.1:<0.7383.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:14:20.706,ns_1@127.0.0.1:<0.7383.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: 
[{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:14:50.717,ns_1@127.0.0.1:<0.7516.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:14:50.718,ns_1@127.0.0.1:<0.7516.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:15:20.730,ns_1@127.0.0.1:<0.7652.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:15:20.732,ns_1@127.0.0.1:<0.7652.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:15:50.743,ns_1@127.0.0.1:<0.7785.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:15:50.745,ns_1@127.0.0.1:<0.7785.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:16:20.757,ns_1@127.0.0.1:<0.7921.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:16:20.759,ns_1@127.0.0.1:<0.7921.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:16:50.769,ns_1@127.0.0.1:<0.8079.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:16:50.770,ns_1@127.0.0.1:<0.8079.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:17:20.783,ns_1@127.0.0.1:<0.8215.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:17:20.784,ns_1@127.0.0.1:<0.8215.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:17:50.796,ns_1@127.0.0.1:<0.8348.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:17:50.798,ns_1@127.0.0.1:<0.8348.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:18:20.812,ns_1@127.0.0.1:<0.8484.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:18:20.813,ns_1@127.0.0.1:<0.8484.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:18:50.827,ns_1@127.0.0.1:<0.8617.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` 
[ns_server:info,2012-11-13T10:18:50.828,ns_1@127.0.0.1:<0.8617.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:19:20.840,ns_1@127.0.0.1:<0.8753.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:19:20.842,ns_1@127.0.0.1:<0.8753.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:19:50.854,ns_1@127.0.0.1:<0.8907.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:19:50.856,ns_1@127.0.0.1:<0.8907.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:20:20.866,ns_1@127.0.0.1:<0.9043.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:20:20.867,ns_1@127.0.0.1:<0.9043.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:20:50.879,ns_1@127.0.0.1:<0.9176.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:20:50.881,ns_1@127.0.0.1:<0.9176.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:21:20.896,ns_1@127.0.0.1:<0.9312.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:21:20.897,ns_1@127.0.0.1:<0.9312.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:21:50.910,ns_1@127.0.0.1:<0.9447.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:21:50.911,ns_1@127.0.0.1:<0.9447.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:22:20.923,ns_1@127.0.0.1:<0.9583.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:22:20.925,ns_1@127.0.0.1:<0.9583.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:22:50.936,ns_1@127.0.0.1:<0.9741.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:22:50.937,ns_1@127.0.0.1:<0.9741.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [menelaus:info,2012-11-13T10:23:04.205,ns_1@127.0.0.1:<0.3950.0>:menelaus_web:loop:369]Invalid 
delete received: {mochiweb_request,#Port<0.12920>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"] [ns_server:info,2012-11-13T10:23:20.950,ns_1@127.0.0.1:<0.9884.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:23:20.952,ns_1@127.0.0.1:<0.9884.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:23:50.964,ns_1@127.0.0.1:<0.10017.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:23:50.965,ns_1@127.0.0.1:<0.10017.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:24:08.828,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_port_sup:terminate_port:129]unsupervising port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:<0.3970.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:<0.3970.0>:ns_port_server:log:171]moxi<0.3970.0>: EOL on stdin. 
Exiting [ns_server:info,2012-11-13T10:24:08.830,ns_1@127.0.0.1:ns_config_events<0.3874.0>:ns_port_sup:launch_port:74]supervising port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:info,2012-11-13T10:24:08.834,ns_1@127.0.0.1:ns_config_log<0.3882.0>:ns_config_log:handle_info:57]config change: rest_creds -> ******** [error_logger:info,2012-11-13T10:24:08.835,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.10104.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:24:09.041,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:log:171]moxi<0.10105.0>: 2012-11-13 10:24:08: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.10105.0>: 2012-11-13 10:24:08: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8) [ns_server:info,2012-11-13T10:24:20.978,ns_1@127.0.0.1:<0.10165.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:24:20.978,ns_1@127.0.0.1:<0.10165.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] 
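The unsupervise/supervise pair above is ns_port_sup restarting moxi because the REST credentials changed (note the config-change line: rest_creds -> ********): the old instance ran with empty MOXI_SASL_PLAIN_USR/PWD, the new one carries the admin credentials, and moxi's own log confirms them only by value length (13 and 8 characters). Launching an external port with such an environment boils down to erlang:open_port/2 — a simplified sketch, not the actual ns_port_server code (port_server_send_eol in the reports is an ns_server-level option, omitted here):

    %% Hedged sketch: spawn moxi with credentials passed via the
    %% environment, mirroring the option list visible in the reports.
    start_moxi(Path, Args, User, Pass) ->
        open_port({spawn_executable, Path},
                  [{args, Args},
                   {env, [{"EVENT_NOSELECT", "1"},
                          {"MOXI_SASL_PLAIN_USR", User},
                          {"MOXI_SASL_PLAIN_PWD", Pass}]},
                   use_stdio, exit_status, stderr_to_stdout, stream]).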
[ns_server:info,2012-11-13T10:24:50.990,ns_1@127.0.0.1:<0.10298.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:24:50.992,ns_1@127.0.0.1:<0.10298.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:25:20.004,ns_1@127.0.0.1:<0.10433.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:25:20.005,ns_1@127.0.0.1:<0.10433.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:25:50.016,ns_1@127.0.0.1:<0.10588.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:25:50.017,ns_1@127.0.0.1:<0.10588.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:26:20.028,ns_1@127.0.0.1:<0.10726.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:26:20.029,ns_1@127.0.0.1:<0.10726.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:26:50.040,ns_1@127.0.0.1:<0.10861.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:26:50.041,ns_1@127.0.0.1:<0.10861.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [menelaus:info,2012-11-13T10:26:53.260,ns_1@127.0.0.1:<0.3958.0>:menelaus_web:loop:369]Invalid delete received: {mochiweb_request,#Port<0.13316>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"] [menelaus:info,2012-11-13T10:26:58.156,ns_1@127.0.0.1:<0.10858.0>:menelaus_web:loop:369]Invalid delete received: {mochiweb_request,#Port<0.13324>,'DELETE', "/pools/default/buckets/", {1,1}, {4, {"host", {'Host',"localhost:8091"}, {"accept-encoding", {'Accept-Encoding',"identity"}, nil, {"content-type", {'Content-Type', "application/x-www-form-urlencoded"}, {"authorization", {'Authorization', "Basic QWRtaW5pc3RyYXRvcjpwYXNzd29yZA=="}, nil,nil}, nil}}, nil}}} as ["pools","default","buckets"] [ns_server:info,2012-11-13T10:27:20.053,ns_1@127.0.0.1:<0.11001.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `default` [ns_server:info,2012-11-13T10:27:20.054,ns_1@127.0.0.1:<0.11001.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket default with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [user:info,2012-11-13T10:27:34.333,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:terminate:661]Shutting down bucket "default" on 
'ns_1@127.0.0.1' for deletion [ns_server:info,2012-11-13T10:27:34.413,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/master">>: ok [ns_server:info,2012-11-13T10:27:34.415,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/9">>: ok [ns_server:info,2012-11-13T10:27:34.417,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/8">>: ok [ns_server:info,2012-11-13T10:27:34.419,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/7">>: ok [ns_server:info,2012-11-13T10:27:34.420,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/63">>: ok [ns_server:info,2012-11-13T10:27:34.422,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/62">>: ok [ns_server:info,2012-11-13T10:27:34.424,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/61">>: ok [ns_server:info,2012-11-13T10:27:34.425,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/60">>: ok [ns_server:info,2012-11-13T10:27:34.427,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/6">>: ok [ns_server:info,2012-11-13T10:27:34.429,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/59">>: ok [ns_server:info,2012-11-13T10:27:34.430,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/58">>: ok [ns_server:info,2012-11-13T10:27:34.433,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/57">>: ok [ns_server:info,2012-11-13T10:27:34.436,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/56">>: ok [ns_server:info,2012-11-13T10:27:34.437,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/55">>: ok [ns_server:info,2012-11-13T10:27:34.439,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/54">>: ok [ns_server:info,2012-11-13T10:27:34.440,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/53">>: ok [ns_server:info,2012-11-13T10:27:34.442,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/52">>: ok [ns_server:info,2012-11-13T10:27:34.443,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/51">>: ok [ns_server:info,2012-11-13T10:27:34.445,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/50">>: ok [ns_server:info,2012-11-13T10:27:34.446,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/5">>: ok [ns_server:info,2012-11-13T10:27:34.448,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/49">>: ok 
[ns_server:info,2012-11-13T10:27:34.449,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/48">>: ok [ns_server:info,2012-11-13T10:27:34.451,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/47">>: ok [ns_server:info,2012-11-13T10:27:34.452,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/46">>: ok [ns_server:info,2012-11-13T10:27:34.453,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/45">>: ok [ns_server:info,2012-11-13T10:27:34.455,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/44">>: ok [ns_server:info,2012-11-13T10:27:34.456,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/43">>: ok [ns_server:info,2012-11-13T10:27:34.458,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/42">>: ok [ns_server:info,2012-11-13T10:27:34.459,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/41">>: ok [ns_server:info,2012-11-13T10:27:34.461,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/40">>: ok [ns_server:info,2012-11-13T10:27:34.462,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/4">>: ok [ns_server:info,2012-11-13T10:27:34.464,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/39">>: ok [ns_server:info,2012-11-13T10:27:34.465,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/38">>: ok [ns_server:info,2012-11-13T10:27:34.466,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/37">>: ok [ns_server:info,2012-11-13T10:27:34.468,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/36">>: ok [ns_server:info,2012-11-13T10:27:34.469,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/35">>: ok [ns_server:info,2012-11-13T10:27:34.471,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/34">>: ok [ns_server:info,2012-11-13T10:27:34.472,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/33">>: ok [ns_server:info,2012-11-13T10:27:34.473,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/32">>: ok [ns_server:info,2012-11-13T10:27:34.475,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/31">>: ok [ns_server:info,2012-11-13T10:27:34.476,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/30">>: ok [ns_server:info,2012-11-13T10:27:34.478,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/3">>: ok 
[ns_server:info,2012-11-13T10:27:34.479,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/29">>: ok [ns_server:info,2012-11-13T10:27:34.481,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/28">>: ok [ns_server:info,2012-11-13T10:27:34.482,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/27">>: ok [ns_server:info,2012-11-13T10:27:34.483,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/26">>: ok [ns_server:info,2012-11-13T10:27:34.484,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/25">>: ok [ns_server:info,2012-11-13T10:27:34.486,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/24">>: ok [ns_server:info,2012-11-13T10:27:34.487,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/23">>: ok [ns_server:info,2012-11-13T10:27:34.488,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/22">>: ok [ns_server:info,2012-11-13T10:27:34.490,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/21">>: ok [ns_server:info,2012-11-13T10:27:34.491,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/20">>: ok [ns_server:info,2012-11-13T10:27:34.492,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/2">>: ok [ns_server:info,2012-11-13T10:27:34.493,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/19">>: ok [ns_server:info,2012-11-13T10:27:34.495,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/18">>: ok [ns_server:info,2012-11-13T10:27:34.496,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/17">>: ok [ns_server:info,2012-11-13T10:27:34.497,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/16">>: ok [ns_server:info,2012-11-13T10:27:34.498,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/15">>: ok [ns_server:info,2012-11-13T10:27:34.499,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/14">>: ok [ns_server:info,2012-11-13T10:27:34.500,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/13">>: ok [ns_server:info,2012-11-13T10:27:34.502,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/12">>: ok [ns_server:info,2012-11-13T10:27:34.503,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/11">>: ok [ns_server:info,2012-11-13T10:27:34.504,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/10">>: ok 
[ns_server:info,2012-11-13T10:27:34.505,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/1">>: ok [ns_server:info,2012-11-13T10:27:34.507,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"default/0">>: ok [ns_server:info,2012-11-13T10:27:34.507,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_storage_conf:delete_databases_and_files:475]Couch dbs are deleted. Proceeding with bucket directory [ns_server:info,2012-11-13T10:27:34.535,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:27:34.334628 PST 3: Shutting down tap connections! [ns_server:info,2012-11-13T10:27:34.739,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:idle:452]Restarting moxi on nodes ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T10:27:34.739,ns_1@127.0.0.1:<0.11154.0>:ns_port_sup:restart_port:134]restarting port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:info,2012-11-13T10:27:34.741,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:27:34.741,ns_1@127.0.0.1:<0.10105.0>:ns_port_server:log:171]moxi<0.10105.0>: EOL on stdin. 
Exiting [menelaus:info,2012-11-13T10:27:34.743,ns_1@127.0.0.1:<0.10106.0>:menelaus_web_buckets:handle_bucket_delete:340]Deleted bucket "default" [error_logger:info,2012-11-13T10:27:34.746,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.11155.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:27:34.951,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: 2012-11-13 10:27:34: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.11156.0>: 2012-11-13 10:27:34: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8) [ns_server:info,2012-11-13T10:35:17.205,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:maybe_pass_token:89]Passed samples loading token to task: beer-sample [error_logger:info,2012-11-13T10:35:17.207,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.12898.0>}, {name,{per_bucket_sup,"beer-sample"}}, {mfargs,{single_bucket_sup,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [stats:error,2012-11-13T10:35:17.447,ns_1@127.0.0.1:<0.10101.0>:stats_reader:log_bad_responses:191]Some nodes didn't respond: ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T10:35:17.460,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: 2012-11-13 10:35:19: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of vBuckets must be a power of two > 0 and <= 65536 ({ moxi<0.11156.0>: "name": "beer-sample", moxi<0.11156.0>: "nodeLocator": "vbucket", moxi<0.11156.0>: "saslPassword": "", moxi<0.11156.0>: "nodes": [{ moxi<0.11156.0>: "hostname": "127.0.0.1:8091", moxi<0.11156.0>: "ports": { moxi<0.11156.0>: "direct": 11210, 
moxi<0.11156.0>: "proxy": 11211 moxi<0.11156.0>: } moxi<0.11156.0>: }], moxi<0.11156.0>: "vBucketServerMap": { moxi<0.11156.0>: "hashAlgorithm": "CRC", moxi<0.11156.0>: "numReplicas": 1, moxi<0.11156.0>: "serverList": ["127.0.0.1:11210"], moxi<0.11156.0>: "vBucketMap": [] moxi<0.11156.0>: } moxi<0.11156.0>: }) [error_logger:info,2012-11-13T10:35:17.650,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12901.0>}, {name,{capi_set_view_manager,"beer-sample"}}, {mfargs, {capi_set_view_manager,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:error,2012-11-13T10:35:17.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,menelaus_sup} Context: child_terminated Reason: {noproc, {gen_server,call, [{'stats_reader-beer-sample','ns_1@127.0.0.1'}, {latest,minute,1}]}} Offender: [{pid,<0.3962.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:17.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.12927.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:info,2012-11-13T10:35:17.992,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:ensure_bucket:1119]Created bucket "beer-sample" with config string "ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=104857600;tap_keepalive=300;dbname=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/beer-sample;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=beer-sample;couch_port=11213;max_vbuckets=64;alog_path=/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/beer-sample/access.log;data_traffic_enabled=false;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [error_logger:info,2012-11-13T10:35:17.995,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12922.0>}, {name,{ns_memcached,"beer-sample"}}, {mfargs,{ns_memcached,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:17.998,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12933.0>}, {name,{tap_replication_manager,"beer-sample"}}, {mfargs, {tap_replication_manager,start_link, ["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:17.999,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12934.0>}, {name,{ns_vbm_new_sup,"beer-sample"}}, {mfargs,{ns_vbm_new_sup,start_link,["beer-sample"]}}, 
{restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2012-11-13T10:35:18.001,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12935.0>}, {name,{ns_vbm_sup,"beer-sample"}}, {mfargs,{ns_vbm_sup,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:info,2012-11-13T10:35:18.016,ns_1@127.0.0.1:janitor_agent-beer-sample<0.12936.0>:janitor_agent:read_flush_counter:764]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. [ns_server:info,2012-11-13T10:35:18.016,ns_1@127.0.0.1:janitor_agent-beer-sample<0.12936.0>:janitor_agent:read_flush_counter_from_config:771]Initialized flushseq 0 from bucket config [error_logger:info,2012-11-13T10:35:18.017,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12936.0>}, {name,{janitor_agent,"beer-sample"}}, {mfargs,{janitor_agent,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:18.019,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12937.0>}, {name,{couch_stats_reader,"beer-sample"}}, {mfargs, {couch_stats_reader,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:18.020,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12938.0>}, {name,{stats_collector,"beer-sample"}}, {mfargs,{stats_collector,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:18.021,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12940.0>}, {name,{stats_archiver,"beer-sample"}}, {mfargs,{stats_archiver,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:18.022,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12942.0>}, {name,{stats_reader,"beer-sample"}}, {mfargs,{stats_reader,start_link,["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2012-11-13T10:35:18.023,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-beer-sample'} started: [{pid,<0.12943.0>}, {name,{failover_safeness_level,"beer-sample"}}, {mfargs, {failover_safeness_level,start_link, ["beer-sample"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[ns_server:info,2012-11-13T10:35:18.113,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:35:17.912626 PST 3: Trying to connect to mccouch: "localhost:11213" memcached<0.3972.0>: Tue Nov 13 10:35:17.941540 PST 3: Connected to mccouch: "localhost:11213" memcached<0.3972.0>: Tue Nov 13 10:35:17.991827 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear" memcached<0.3972.0>: Tue Nov 13 10:35:17.992033 PST 3: Failed to load mutation log, falling back to key dump memcached<0.3972.0>: Tue Nov 13 10:35:17.992232 PST 3: Extension support isn't implemented in this version of bucket_engine memcached<0.3972.0>: Tue Nov 13 10:35:17.992439 PST 3: metadata loaded in 14 ms memcached<0.3972.0>: Tue Nov 13 10:35:17.992989 PST 3: warmup completed in 14 ms [user:info,2012-11-13T10:35:18.154,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_cast:581]Bucket "beer-sample" loaded on node 'ns_1@127.0.0.1' in 0 seconds. [ns_server:info,2012-11-13T10:35:18.209,ns_1@127.0.0.1:<0.12892.0>:ns_janitor:cleanup_with_membase_bucket_check_map:67]janitor decided to generate initial vbucket map [ns_server:info,2012-11-13T10:35:18.236,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 63 state to active [ns_server:info,2012-11-13T10:35:18.238,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 62 state to active [ns_server:info,2012-11-13T10:35:18.239,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 61 state to active [ns_server:info,2012-11-13T10:35:18.240,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 60 state to active [ns_server:info,2012-11-13T10:35:18.241,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 59 state to active [ns_server:info,2012-11-13T10:35:18.241,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 58 state to active [ns_server:info,2012-11-13T10:35:18.242,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 57 state to active [ns_server:info,2012-11-13T10:35:18.243,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 56 state to active [ns_server:info,2012-11-13T10:35:18.244,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 55 state to active [ns_server:info,2012-11-13T10:35:18.244,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 54 state to active [ns_server:info,2012-11-13T10:35:18.245,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 53 state to active [ns_server:info,2012-11-13T10:35:18.245,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 52 state to active [ns_server:info,2012-11-13T10:35:18.328,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 51 state to active [ns_server:info,2012-11-13T10:35:18.329,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 50 state to active [ns_server:info,2012-11-13T10:35:18.330,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 49 state to active [ns_server:info,2012-11-13T10:35:18.331,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 48 state to active [ns_server:info,2012-11-13T10:35:18.331,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 47 state to active 
[ns_server:info,2012-11-13T10:35:18.332,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 46 state to active [ns_server:info,2012-11-13T10:35:18.332,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 45 state to active [ns_server:info,2012-11-13T10:35:18.333,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 44 state to active [ns_server:info,2012-11-13T10:35:18.333,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 43 state to active [ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 42 state to active [ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 41 state to active [ns_server:info,2012-11-13T10:35:18.334,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 40 state to active [ns_server:info,2012-11-13T10:35:18.388,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 39 state to active [ns_server:info,2012-11-13T10:35:18.389,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 38 state to active [ns_server:info,2012-11-13T10:35:18.390,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 37 state to active [ns_server:info,2012-11-13T10:35:18.392,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 36 state to active [ns_server:info,2012-11-13T10:35:18.393,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 35 state to active [ns_server:info,2012-11-13T10:35:18.394,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 34 state to active [ns_server:info,2012-11-13T10:35:18.395,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 33 state to active [ns_server:info,2012-11-13T10:35:18.396,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 32 state to active [ns_server:info,2012-11-13T10:35:18.396,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 31 state to active [ns_server:info,2012-11-13T10:35:18.397,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 30 state to active [ns_server:info,2012-11-13T10:35:18.398,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 29 state to active [ns_server:info,2012-11-13T10:35:18.398,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 28 state to active [ns_server:info,2012-11-13T10:35:18.399,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 27 state to active [ns_server:info,2012-11-13T10:35:18.475,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 26 state to active [ns_server:info,2012-11-13T10:35:18.476,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 25 state to active [ns_server:info,2012-11-13T10:35:18.477,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 24 state to active [ns_server:info,2012-11-13T10:35:18.478,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 23 state to active [ns_server:info,2012-11-13T10:35:18.479,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 22 state to active [ns_server:info,2012-11-13T10:35:18.480,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 21 state to active 
[ns_server:info,2012-11-13T10:35:18.480,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 20 state to active [ns_server:info,2012-11-13T10:35:18.481,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 19 state to active [ns_server:info,2012-11-13T10:35:18.481,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 18 state to active [ns_server:info,2012-11-13T10:35:18.482,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 17 state to active [ns_server:info,2012-11-13T10:35:18.482,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 16 state to active [ns_server:info,2012-11-13T10:35:18.483,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 15 state to active [ns_server:info,2012-11-13T10:35:18.484,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 14 state to active [ns_server:info,2012-11-13T10:35:18.484,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 13 state to active [ns_server:info,2012-11-13T10:35:18.485,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 12 state to active [ns_server:info,2012-11-13T10:35:18.485,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 11 state to active [ns_server:info,2012-11-13T10:35:18.486,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 10 state to active [ns_server:info,2012-11-13T10:35:18.486,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 9 state to active [ns_server:info,2012-11-13T10:35:18.487,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 8 state to active [ns_server:info,2012-11-13T10:35:18.487,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 7 state to active [ns_server:info,2012-11-13T10:35:18.488,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 6 state to active [ns_server:info,2012-11-13T10:35:18.488,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 5 state to active [ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 4 state to active [ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 3 state to active [ns_server:info,2012-11-13T10:35:18.489,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 2 state to active [ns_server:info,2012-11-13T10:35:18.490,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 1 state to active [ns_server:info,2012-11-13T10:35:18.490,ns_1@127.0.0.1:<0.12932.0>:ns_memcached:do_handle_call:521]Changed vbucket 0 state to active [ns_server:info,2012-11-13T10:35:18.491,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_call:244]Enabling traffic to bucket "beer-sample" [ns_server:info,2012-11-13T10:35:18.491,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:handle_call:248]Bucket "beer-sample" marked as warmed in 0 seconds [ns_server:info,2012-11-13T10:35:20.097,ns_1@127.0.0.1:<0.13060.0>:compaction_daemon:check_all_dbs_exist:1266]Skipping compaction of bucket `beer-sample` since at least database `beer-sample/0` seems to be missing. 
[ns_server:info,2012-11-13T10:35:22.002,ns_1@127.0.0.1:ns_doctor<0.3916.0>:ns_doctor:update_status:211]The following buckets became ready on node 'ns_1@127.0.0.1': ["beer-sample"] [ns_server:info,2012-11-13T10:35:50.100,ns_1@127.0.0.1:<0.13657.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `beer-sample` [ns_server:info,2012-11-13T10:35:50.101,ns_1@127.0.0.1:<0.13657.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket beer-sample with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:info,2012-11-13T10:36:06.153,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:handle_info:353]Processing update_ddoc _design/beer (false) [menelaus:warn,2012-11-13T10:36:10.435,ns_1@127.0.0.1:<0.13447.0>:menelaus_web:loop:342]Client-side error-report for user "Administrator" on node 'ns_1@127.0.0.1': User-Agent:Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11 Got unhandled error: Uncaught TypeError: Cannot call method 'concat' of undefined At: http://127.0.0.1:8091/js/analytics.js:186 Backtrace: Function: collectBacktraceViaCaller Args: --------- Function: appOnError Args: "Uncaught TypeError: Cannot call method 'concat' of undefined" "http://127.0.0.1:8091/js/analytics.js" 186 --------- [ns_server:info,2012-11-13T10:36:20.112,ns_1@127.0.0.1:<0.13872.0>:compaction_daemon:try_to_cleanup_indexes:439]Cleaning up indexes for bucket `beer-sample` [ns_server:info,2012-11-13T10:36:20.221,ns_1@127.0.0.1:<0.13872.0>:compaction_daemon:spawn_bucket_compactor:404]Compacting bucket beer-sample with config: [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [user:info,2012-11-13T10:36:43.365,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_memcached:terminate:661]Shutting down bucket "beer-sample" on 'ns_1@127.0.0.1' for deletion [ns_server:info,2012-11-13T10:36:43.443,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/master">>: ok [ns_server:info,2012-11-13T10:36:43.445,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/9">>: ok [ns_server:info,2012-11-13T10:36:43.447,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/8">>: ok [ns_server:info,2012-11-13T10:36:43.449,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/7">>: ok [ns_server:info,2012-11-13T10:36:43.450,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/63">>: ok [ns_server:info,2012-11-13T10:36:43.452,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/62">>: ok [ns_server:info,2012-11-13T10:36:43.454,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/61">>: ok [ns_server:info,2012-11-13T10:36:43.518,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/60">>: ok [ns_server:info,2012-11-13T10:36:43.521,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database 
<<"beer-sample/6">>: ok [ns_server:info,2012-11-13T10:36:43.523,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/59">>: ok [ns_server:info,2012-11-13T10:36:43.525,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/58">>: ok [ns_server:info,2012-11-13T10:36:43.526,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/57">>: ok [ns_server:info,2012-11-13T10:36:43.528,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/56">>: ok [ns_server:info,2012-11-13T10:36:43.566,ns_1@127.0.0.1:ns_port_memcached<0.3972.0>:ns_port_server:log:171]memcached<0.3972.0>: Tue Nov 13 10:36:43.366118 PST 3: Shutting down tap connections! [ns_server:info,2012-11-13T10:36:43.566,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/55">>: ok [ns_server:info,2012-11-13T10:36:43.567,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/54">>: ok [ns_server:info,2012-11-13T10:36:43.569,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/53">>: ok [ns_server:info,2012-11-13T10:36:43.571,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/52">>: ok [ns_server:info,2012-11-13T10:36:43.572,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/51">>: ok [ns_server:info,2012-11-13T10:36:43.573,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/50">>: ok [ns_server:info,2012-11-13T10:36:43.575,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/5">>: ok [ns_server:info,2012-11-13T10:36:43.577,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/49">>: ok [ns_server:info,2012-11-13T10:36:43.579,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/48">>: ok [ns_server:info,2012-11-13T10:36:43.580,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/47">>: ok [ns_server:info,2012-11-13T10:36:43.582,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/46">>: ok [ns_server:info,2012-11-13T10:36:43.583,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/45">>: ok [ns_server:info,2012-11-13T10:36:43.584,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/44">>: ok [ns_server:info,2012-11-13T10:36:43.586,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/43">>: ok [ns_server:info,2012-11-13T10:36:43.587,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database 
<<"beer-sample/42">>: ok [ns_server:info,2012-11-13T10:36:43.589,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/41">>: ok [ns_server:info,2012-11-13T10:36:43.590,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/40">>: ok [ns_server:info,2012-11-13T10:36:43.591,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/4">>: ok [ns_server:info,2012-11-13T10:36:43.593,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/39">>: ok [ns_server:info,2012-11-13T10:36:43.594,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/38">>: ok [ns_server:info,2012-11-13T10:36:43.595,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/37">>: ok [ns_server:info,2012-11-13T10:36:43.596,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/36">>: ok [ns_server:info,2012-11-13T10:36:43.598,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/35">>: ok [ns_server:info,2012-11-13T10:36:43.600,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/34">>: ok [ns_server:info,2012-11-13T10:36:43.601,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/33">>: ok [ns_server:info,2012-11-13T10:36:43.603,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/32">>: ok [ns_server:info,2012-11-13T10:36:43.604,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/31">>: ok [ns_server:info,2012-11-13T10:36:43.605,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/30">>: ok [ns_server:info,2012-11-13T10:36:43.606,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/3">>: ok [ns_server:info,2012-11-13T10:36:43.608,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/29">>: ok [ns_server:info,2012-11-13T10:36:43.609,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/28">>: ok [ns_server:info,2012-11-13T10:36:43.611,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/27">>: ok [ns_server:info,2012-11-13T10:36:43.612,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/26">>: ok [ns_server:info,2012-11-13T10:36:43.613,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/25">>: ok [ns_server:info,2012-11-13T10:36:43.614,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/24">>: ok 
[ns_server:info,2012-11-13T10:36:43.616,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/23">>: ok [ns_server:info,2012-11-13T10:36:43.617,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/22">>: ok [ns_server:info,2012-11-13T10:36:43.618,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/21">>: ok [ns_server:info,2012-11-13T10:36:43.619,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/20">>: ok [ns_server:info,2012-11-13T10:36:43.621,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/2">>: ok [ns_server:info,2012-11-13T10:36:43.622,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/19">>: ok [ns_server:info,2012-11-13T10:36:43.623,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/18">>: ok [ns_server:info,2012-11-13T10:36:43.624,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/17">>: ok [ns_server:info,2012-11-13T10:36:43.626,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/16">>: ok [ns_server:info,2012-11-13T10:36:43.627,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/15">>: ok [ns_server:info,2012-11-13T10:36:43.628,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/14">>: ok [ns_server:info,2012-11-13T10:36:43.629,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/13">>: ok [ns_server:info,2012-11-13T10:36:43.630,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/12">>: ok [ns_server:info,2012-11-13T10:36:43.632,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/11">>: ok [ns_server:info,2012-11-13T10:36:43.633,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/10">>: ok [ns_server:info,2012-11-13T10:36:43.634,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/1">>: ok [ns_server:info,2012-11-13T10:36:43.635,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_couch_database:432]Deleting database <<"beer-sample/0">>: ok [ns_server:info,2012-11-13T10:36:43.635,ns_1@127.0.0.1:ns_memcached-beer-sample<0.12922.0>:ns_storage_conf:delete_databases_and_files:475]Couch dbs are deleted. 
Proceeding with bucket directory [ns_server:info,2012-11-13T10:36:43.865,ns_1@127.0.0.1:<0.3929.0>:ns_orchestrator:idle:452]Restarting moxi on nodes ['ns_1@127.0.0.1'] [ns_server:info,2012-11-13T10:36:43.877,ns_1@127.0.0.1:<0.14246.0>:ns_port_sup:restart_port:134]restarting port: {moxi,"/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]} [ns_server:info,2012-11-13T10:36:43.905,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:handle_info:104]Port server moxi exited with status 0 [ns_server:info,2012-11-13T10:36:43.905,ns_1@127.0.0.1:<0.11156.0>:ns_port_server:log:171]moxi<0.11156.0>: EOL on stdin. Exiting [menelaus:info,2012-11-13T10:36:43.907,ns_1@127.0.0.1:<0.13448.0>:menelaus_web_buckets:handle_bucket_delete:340]Deleted bucket "beer-sample" [error_logger:info,2012-11-13T10:36:43.947,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================PROGRESS REPORT========================= supervisor: {local,ns_port_sup} started: [{pid,<0.14271.0>}, {name, {moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,10000,ns_port_server,start_link, [moxi, "/Users/farshid/Downloads/couchbase-server-community_x86_64_2.0.0-1949-rel/Couchbase Server.app/Contents/Resources/couchbase-core/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","password"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [ns_server:info,2012-11-13T10:36:44.133,ns_1@127.0.0.1:<0.14272.0>:ns_port_server:log:171]moxi<0.14272.0>: 2012-11-13 10:36:43: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13) moxi<0.14272.0>: 2012-11-13 10:36:43: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8) 
[error_logger:error,2012-11-13T10:37:13.430,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: samples_loader_tasks:perform_loading_task/1 pid: <0.12893.0> registered_name: [] exception exit: {failed_to_load_samples_with_status,1} in function samples_loader_tasks:perform_loading_task/1 ancestors: [samples_loader_tasks,ns_server_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [<0.4011.0>] dictionary: [] trap_exit: false status: running heap_size: 28657 stack_size: 24 reductions: 30019 neighbours: [user:error,2012-11-13T10:37:13.442,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:handle_info:64]Loading sample bucket beer-sample failed: {failed_to_load_samples_with_status, 1}
------------------------------- logs_node (error): -------------------------------
[error_logger:error,2012-11-13T9:56:17.916,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.249.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:error,2012-11-13T9:56:21.458,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:error,2012-11-13T9:56:21.728,ns_1@127.0.0.1:ns_heart<0.406.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:error,2012-11-13T9:56:32.506,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.492.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:error,2012-11-13T9:56:33.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.377.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:error,2012-11-13T9:56:33.491,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.483.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.483.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.480.0>,couch_server,couch_primary_services, 
couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 2016 neighbours: [error_logger:error,2012-11-13T9:56:33.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.260.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.260.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.257.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 2583 neighbours: [error_logger:error,2012-11-13T9:56:33.494,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.484.0> terminating ** Last message in was {'EXIT',<0.245.0>,killed} ** When Server state == {db,<0.484.0>,<0.485.0>,nil,<<"1352829382613535">>, <0.480.0>,<0.486.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.480.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.480.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.480.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create,sys_db, {user_ctx, {user_ctx,null, [<<"_admin">>,<<"_replicator">>], undefined}}]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T9:56:33.498,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.484.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 313 neighbours: [error_logger:error,2012-11-13T9:56:33.499,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.261.0> terminating ** Last message in was {'EXIT',<0.245.0>,killed} ** When Server state == {db,<0.261.0>,<0.262.0>,nil,<<"1352829377441480">>, <0.257.0>,<0.263.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.257.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.257.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.257.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create, {user_ctx, {user_ctx,null,[<<"_admin">>],undefined}}, sys_db]} ** Reason for termination == ** killed 
[error_logger:error,2012-11-13T9:56:33.503,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.261.0>
    registered_name: []
    exception exit: killed
      in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 395
  neighbours:

[error_logger:error,2012-11-13T9:56:33.897,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,couch_primary_services}
  Context:    child_terminated
  Reason:     normal
  Offender:   [{pid,<0.620.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]

[ns_server:error,2012-11-13T9:56:35.974,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]
Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}

[ns_server:error,2012-11-13T9:56:36.014,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]
Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}

[ns_server:error,2012-11-13T10:00:17.454,ns_1@127.0.0.1:ns_config<0.754.0>:ns_config:handle_call:623]
Failed to update config: {exit,{not_found,"default"}}
Stacktrace: [{ns_bucket,'-delete_bucket_returning_config/1-fun-0-',4}, {misc,key_update_rec,4}, {ns_config,'-update_sub_key/3-fun-0-',3}, {ns_config,'-update_key/2-fun-0-',3}, {ns_config,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]

[error_logger:error,2012-11-13T10:01:24.326,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,ns_bucket_sup}
  Context:    shutdown_error
  Reason:     normal
  Offender:   [{pid,<0.861.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

[ns_server:error,2012-11-13T10:01:24.526,ns_1@127.0.0.1:ns_heart<0.791.0>:ns_heart:grab_samples_loading_tasks:319]
Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}

[error_logger:error,2012-11-13T10:01:24.655,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,ns_server_cluster_sup}
  Context:    shutdown_error
  Reason:     killed
  Offender:   [{pid,<0.764.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}]
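Note on the repeated "Failed to grab samples loader tasks" entries above: ns_heart issues gen_server:call(samples_loader_tasks, get_tasks, 2000) while the samples_loader_tasks process is not registered (it is being restarted after the beer-sample failure), and in Erlang/OTP such a call exits the caller with {noproc, {gen_server,call, [...]}}. A minimal sketch of that semantics and of a guarded variant of the call; this is plain OTP behaviour, not Couchbase source, and the wrapper name safe_get_tasks is hypothetical:

%% Illustrative only. gen_server:call/3 to an unregistered name exits
%% with {noproc, {gen_server,call,[Name,Request,Timeout]}}, exactly the
%% term logged by ns_heart above; a caller can trap it like this.
safe_get_tasks() ->
    try gen_server:call(samples_loader_tasks, get_tasks, 2000) of
        Tasks -> {ok, Tasks}
    catch
        exit:{noproc, _}  -> {error, not_running};  % server not registered (yet)
        exit:{timeout, _} -> {error, timeout}       % no reply within 2000 ms
    end.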
[error_logger:error,2012-11-13T10:01:24.856,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.614.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.614.0>,<0.615.0>,nil,<<"1352829393849668">>, <0.611.0>,<0.616.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.611.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.611.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.611.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.861,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.614.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 241 neighbours: [error_logger:error,2012-11-13T10:01:24.862,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1905.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1905.0>,<0.1906.0>,nil,<<"1352829621747368">>, <0.1902.0>,<0.1907.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1902.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1902.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1902.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/46">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1905.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 573 neighbours: [error_logger:error,2012-11-13T10:01:24.866,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.608.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.608.0>,<0.609.0>,nil,<<"1352829393812330">>, <0.605.0>,<0.610.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.605.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.605.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.605.0>,nil, #Fun, #Fun, 
#Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.608.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 328 neighbours: [error_logger:error,2012-11-13T10:01:24.871,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1967.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1967.0>,<0.1968.0>,nil,<<"1352829622857071">>, <0.1964.0>,<0.1969.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1964.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1964.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1964.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/36">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1967.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 565 neighbours: [error_logger:error,2012-11-13T10:01:24.876,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1858.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1858.0>,<0.1859.0>,nil,<<"1352829620929634">>, <0.1855.0>,<0.1860.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1855.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1855.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1855.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/52">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.880,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1858.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 
reductions: 584 neighbours: [error_logger:error,2012-11-13T10:01:24.896,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2079.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2079.0>,<0.2080.0>,nil,<<"1352829624750495">>, <0.2076.0>,<0.2081.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2076.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2076.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2076.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/18">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2079.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:24.901,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2055.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2055.0>,<0.2056.0>,nil,<<"1352829624313759">>, <0.2052.0>,<0.2057.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2052.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2052.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2052.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/22">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.905,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2055.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:24.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2085.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2085.0>,<0.2086.0>,nil,<<"1352829624862447">>, <0.2082.0>,<0.2087.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2082.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2082.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2082.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/17">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], 
[]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2085.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:24.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2121.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2121.0>,<0.2122.0>,nil,<<"1352829625534387">>, <0.2118.0>,<0.2123.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2118.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2118.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2118.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/11">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.916,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2121.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 616 neighbours: [error_logger:error,2012-11-13T10:01:24.917,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2176.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2176.0>,<0.2177.0>,nil,<<"1352829626420340">>, <0.2173.0>,<0.2178.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2173.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2173.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2173.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/5">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2176.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 611 neighbours: [error_logger:error,2012-11-13T10:01:24.921,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2170.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** 
When Server state == {db,<0.2170.0>,<0.2171.0>,nil,<<"1352829626308033">>, <0.2167.0>,<0.2172.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2167.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2167.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2167.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/6">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.925,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2170.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 619 neighbours: [error_logger:error,2012-11-13T10:01:24.927,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.1693.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.1693.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.1690.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 863 neighbours: [error_logger:error,2012-11-13T10:01:24.928,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2139.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2139.0>,<0.2140.0>,nil,<<"1352829625961162">>, <0.2136.0>,<0.2141.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2136.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2136.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2136.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/8">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2139.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 644 neighbours: [error_logger:error,2012-11-13T10:01:24.932,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2182.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2182.0>,<0.2183.0>,nil,<<"1352829626532283">>, <0.2179.0>,<0.2184.0>, 
{db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2179.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2179.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2179.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/4">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.937,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2182.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 539 neighbours: [error_logger:error,2012-11-13T10:01:24.938,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1949.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1949.0>,<0.1950.0>,nil,<<"1352829622541445">>, <0.1946.0>,<0.1951.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1946.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1946.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1946.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/39">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.942,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1949.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 673 neighbours: [error_logger:error,2012-11-13T10:01:24.943,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1864.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1864.0>,<0.1865.0>,nil,<<"1352829621044192">>, <0.1861.0>,<0.1876.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1861.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1861.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1861.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/51">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.946,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1864.0> registered_name: [] exception exit: killed in function 
gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 668 neighbours: [error_logger:error,2012-11-13T10:01:24.947,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2097.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2097.0>,<0.2098.0>,nil,<<"1352829625075442">>, <0.2094.0>,<0.2099.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2094.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2094.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2094.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/15">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.950,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1887.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1887.0>,<0.1888.0>,nil,<<"1352829621295039">>, <0.1884.0>,<0.1889.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1884.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1884.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1884.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/49">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.953,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2097.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 676 neighbours: [error_logger:error,2012-11-13T10:01:24.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1816.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1816.0>,<0.1817.0>,nil,<<"1352829620203018">>, <0.1813.0>,<0.1818.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1813.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1813.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1813.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/59">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1887.0> registered_name: [] exception exit: 
killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 693 neighbours: [error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1816.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 688 neighbours: [error_logger:error,2012-11-13T10:01:24.959,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1880.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1880.0>,<0.1881.0>,nil,<<"1352829621166446">>, <0.1877.0>,<0.1883.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1877.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/50">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.963,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1880.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:24.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1955.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1955.0>,<0.1956.0>,nil,<<"1352829622642724">>, <0.1952.0>,<0.1957.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1952.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1952.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1952.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/38">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.968,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1955.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 
stack_size: 24 reductions: 725 neighbours: [error_logger:error,2012-11-13T10:01:24.969,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1822.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1822.0>,<0.1823.0>,nil,<<"1352829620303621">>, <0.1819.0>,<0.1824.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1819.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1819.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1819.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/58">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.972,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2043.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2043.0>,<0.2044.0>,nil,<<"1352829624110888">>, <0.2040.0>,<0.2045.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2040.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2040.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2040.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/24">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.976,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1822.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 720 neighbours: [error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2043.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 724 neighbours: [error_logger:error,2012-11-13T10:01:24.978,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2091.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2091.0>,<0.2092.0>,nil,<<"1352829624963872">>, <0.2088.0>,<0.2093.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2088.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2088.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2088.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/16">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, 
[before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2091.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 736 neighbours: [error_logger:error,2012-11-13T10:01:24.982,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2015.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2015.0>,<0.2016.0>,nil,<<"1352829623674489">>, <0.2012.0>,<0.2017.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2012.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2012.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2012.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/28">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.986,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2015.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 744 neighbours: [error_logger:error,2012-11-13T10:01:24.987,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1840.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1840.0>,<0.1841.0>,nil,<<"1352829620605270">>, <0.1837.0>,<0.1842.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1837.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1837.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1837.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/55">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:24.990,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1840.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 752 neighbours: [error_logger:error,2012-11-13T10:01:25.114,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1846.0> terminating ** Last 
message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1846.0>,<0.1847.0>,nil,<<"1352829620717440">>, <0.1843.0>,<0.1848.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1843.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1843.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1843.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/54">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1846.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 764 neighbours: [error_logger:error,2012-11-13T10:01:25.119,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2200.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2200.0>,<0.2201.0>,nil,<<"1352829626845747">>, <0.2197.0>,<0.2202.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2197.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2197.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2197.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/1">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.123,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2200.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 759 neighbours: [error_logger:error,2012-11-13T10:01:25.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1852.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1852.0>,<0.1853.0>,nil,<<"1352829620818124">>, <0.1849.0>,<0.1854.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1849.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1849.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1849.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/53">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.128,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH 
REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1852.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 780 neighbours: [error_logger:error,2012-11-13T10:01:25.129,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1828.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1828.0>,<0.1829.0>,nil,<<"1352829620404369">>, <0.1825.0>,<0.1830.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1825.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1825.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1825.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/57">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.134,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1828.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 788 neighbours: [error_logger:error,2012-11-13T10:01:25.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2049.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2049.0>,<0.2050.0>,nil,<<"1352829624211604">>, <0.2046.0>,<0.2051.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2046.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2046.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2046.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/23">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.138,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2049.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 791 neighbours: [error_logger:error,2012-11-13T10:01:25.141,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1802.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1802.0>,<0.1803.0>,nil,<<"1352829619934967">>, <0.1799.0>,<0.1804.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, 
{btree,<0.1799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1799.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/61">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.144,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1802.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 824 neighbours: [error_logger:error,2012-11-13T10:01:25.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2009.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2009.0>,<0.2010.0>,nil,<<"1352829623563532">>, <0.2006.0>,<0.2011.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2006.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2006.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2006.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/29">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2009.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 832 neighbours: [error_logger:error,2012-11-13T10:01:25.149,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1834.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1834.0>,<0.1835.0>,nil,<<"1352829620504957">>, <0.1831.0>,<0.1836.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1831.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1831.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1831.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/56">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1834.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 844 neighbours: [error_logger:error,2012-11-13T10:01:25.153,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2133.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2133.0>,<0.2134.0>,nil,<<"1352829625860252">>, <0.2130.0>,<0.2135.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2130.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2130.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2130.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/9">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2133.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 852 neighbours: [error_logger:error,2012-11-13T10:01:25.157,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2003.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2003.0>,<0.2004.0>,nil,<<"1352829623450371">>, <0.2000.0>,<0.2005.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2000.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2000.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2000.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/30">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2003.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 860 neighbours: [error_logger:error,2012-11-13T10:01:25.161,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1925.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1925.0>,<0.1926.0>,nil,<<"1352829622094265">>, <0.1922.0>,<0.1927.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1922.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1922.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1922.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, 
true}, 0,<<"default/43">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.164,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1808.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1808.0>,<0.1809.0>,nil,<<"1352829620105814">>, <0.1805.0>,<0.1812.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1805.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/60">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/60.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.168,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1925.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 881 neighbours: [error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1808.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.169,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2109.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2109.0>,<0.2110.0>,nil,<<"1352829625288026">>, <0.2106.0>,<0.2111.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2106.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2106.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2106.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/13">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2109.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: 
[error_logger:error,2012-11-13T10:01:25.173,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1979.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1979.0>,<0.1980.0>,nil,<<"1352829623047373">>, <0.1976.0>,<0.1981.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1976.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1976.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1976.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/34">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1979.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 573 neighbours: [error_logger:error,2012-11-13T10:01:25.177,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1899.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1899.0>,<0.1900.0>,nil,<<"1352829621624691">>, <0.1896.0>,<0.1901.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1896.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1896.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1896.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/47">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.180,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1961.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1961.0>,<0.1962.0>,nil,<<"1352829622754407">>, <0.1958.0>,<0.1963.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1958.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1958.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1958.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/37">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.184,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1899.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 
581 neighbours: [error_logger:error,2012-11-13T10:01:25.185,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1961.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 585 neighbours: [error_logger:error,2012-11-13T10:01:25.186,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2127.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2127.0>,<0.2128.0>,nil,<<"1352829625668919">>, <0.2124.0>,<0.2129.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2124.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2124.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2124.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/10">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.189,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2127.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 584 neighbours: [error_logger:error,2012-11-13T10:01:25.190,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2021.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2021.0>,<0.2022.0>,nil,<<"1352829623786335">>, <0.2018.0>,<0.2023.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2018.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2018.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2018.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/27">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.193,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2021.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.194,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1937.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == 
{db,<0.1937.0>,<0.1938.0>,nil,<<"1352829622323724">>, <0.1934.0>,<0.1939.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1934.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1934.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1934.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/41">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.197,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1937.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 565 neighbours: [error_logger:error,2012-11-13T10:01:25.198,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2073.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2073.0>,<0.2074.0>,nil,<<"1352829624649630">>, <0.2070.0>,<0.2075.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2070.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.201,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2073.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 940 neighbours: [error_logger:error,2012-11-13T10:01:25.202,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2067.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2067.0>,<0.2068.0>,nil,<<"1352829624537966">>, <0.2064.0>,<0.2069.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2064.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: 
couch_db:init/1 pid: <0.2067.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1796.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1796.0>,<0.1797.0>,nil,<<"1352829619826563">>, <0.1793.0>,<0.1798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1793.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1796.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 956 neighbours: [error_logger:error,2012-11-13T10:01:25.210,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2206.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2206.0>,<0.2207.0>,nil,<<"1352829626935149">>, <0.2203.0>,<0.2208.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2203.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2206.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 951 neighbours: [error_logger:error,2012-11-13T10:01:25.215,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2115.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2115.0>,<0.2116.0>,nil,<<"1352829625388570">>, <0.2112.0>,<0.2117.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2112.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2112.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.218,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2115.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 972 neighbours: [error_logger:error,2012-11-13T10:01:25.219,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1973.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1973.0>,<0.1974.0>,nil,<<"1352829622946642">>, <0.1970.0>,<0.1975.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1970.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1970.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.222,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1973.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 993 neighbours: [error_logger:error,2012-11-13T10:01:25.223,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2188.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2188.0>,<0.2189.0>,nil,<<"1352829626632806">>, <0.2185.0>,<0.2190.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2185.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.227,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2188.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] 
messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 975 neighbours: [error_logger:error,2012-11-13T10:01:25.228,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2145.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2145.0>,<0.2146.0>,nil,<<"1352829626107228">>, <0.2142.0>,<0.2162.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2142.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.231,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2145.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 552 neighbours: [error_logger:error,2012-11-13T10:01:25.232,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1931.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1931.0>,<0.1932.0>,nil,<<"1352829622206092">>, <0.1928.0>,<0.1933.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1928.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.235,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1931.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 569 neighbours: [error_logger:error,2012-11-13T10:01:25.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2103.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2103.0>,<0.2104.0>,nil,<<"1352829625181072">>, <0.2100.0>,<0.2105.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2100.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.239,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2103.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 564 neighbours: [error_logger:error,2012-11-13T10:01:25.240,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1911.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1911.0>,<0.1912.0>,nil,<<"1352829621859289">>, <0.1908.0>,<0.1913.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1908.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.243,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1911.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1033 neighbours: [error_logger:error,2012-11-13T10:01:25.244,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1997.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1997.0>,<0.1998.0>,nil,<<"1352829623349426">>, <0.1994.0>,<0.1999.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1994.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.247,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1997.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1028 neighbours: 
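
Every termination/CRASH REPORT pair above follows the same OTP shape: a couch_db gen_server running with trap_exit set receives {'EXIT', <0.604.0>, killed} from a linked process and stops with reason killed, which error_logger records first as a "Generic server terminating" message and then as a crash report out of gen_server:terminate/6. Below is a minimal sketch of that pattern with nothing Couchbase-specific in it; the module name exit_probe is hypothetical and stands in for couch_db only to show the mechanism:

    %% Hypothetical sketch of the shutdown pattern in the reports above:
    %% a gen_server that traps exits and stops with the reason carried
    %% by an 'EXIT' message from a linked process.
    -module(exit_probe).
    -behaviour(gen_server).
    -export([start_link/0]).
    -export([init/1, handle_call/3, handle_cast/2, handle_info/2,
             terminate/2, code_change/3]).

    start_link() ->
        gen_server:start_link(?MODULE, [], []).

    init([]) ->
        %% couch_db processes run this way too, per the
        %% "trap_exit: true" field in each crash report.
        process_flag(trap_exit, true),
        {ok, undefined}.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.

    %% A trapped exit signal arrives as an ordinary message. Stopping
    %% with reason 'killed' (abnormal) makes OTP emit the
    %% "** Generic server ... terminating" log followed by a
    %% CRASH REPORT, matching the couch_db entries above.
    handle_info({'EXIT', _From, Reason}, State) ->
        {stop, Reason, State}.

    terminate(_Reason, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

Killing anything linked to such a process, e.g. exit(LinkedPid, kill), yields one report pair per server, which is why the single kill of <0.604.0> fans out into one "terminating"/CRASH REPORT pair for every open vbucket database (default/0 through default/63 and default/master).
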
[error_logger:error,2012-11-13T10:01:25.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2033.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2033.0>,<0.2034.0>,nil,<<"1352829624009960">>, <0.2030.0>,<0.2039.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2030.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.251,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2033.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1036 neighbours: [error_logger:error,2012-11-13T10:01:25.252,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1893.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1893.0>,<0.1894.0>,nil,<<"1352829621487024">>, <0.1890.0>,<0.1895.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1890.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1893.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1057 neighbours: [error_logger:error,2012-11-13T10:01:25.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1991.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1991.0>,<0.1992.0>,nil,<<"1352829623248968">>, <0.1988.0>,<0.1993.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1988.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:25.259,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1991.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1052 neighbours: [error_logger:error,2012-11-13T10:01:25.260,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2027.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2027.0>,<0.2028.0>,nil,<<"1352829623887061">>, <0.2024.0>,<0.2029.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2024.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1694.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1694.0>,<0.1695.0>,nil,<<"1352829618011823">>, <0.1690.0>,<0.1700.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.1690.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create]} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2027.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1060 neighbours: [error_logger:error,2012-11-13T10:01:25.268,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 reductions: 855 neighbours: [error_logger:error,2012-11-13T10:01:25.269,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1943.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == 
{db,<0.1943.0>,<0.1944.0>,nil,<<"1352829622429697">>, <0.1940.0>,<0.1945.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1940.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.272,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1943.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1089 neighbours: [error_logger:error,2012-11-13T10:01:25.273,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1917.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1917.0>,<0.1918.0>,nil,<<"1352829621982184">>, <0.1914.0>,<0.1921.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1914.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.276,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1917.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 1097 neighbours: [error_logger:error,2012-11-13T10:01:25.277,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2194.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2194.0>,<0.2195.0>,nil,<<"1352829626733573">>, <0.2191.0>,<0.2196.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2191.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1786.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When 
Server state == {db,<0.1786.0>,<0.1787.0>,nil,<<"1352829619455301">>, <0.1783.0>,<0.1790.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1783.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2194.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 539 neighbours: [error_logger:error,2012-11-13T10:01:25.284,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1786.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.1985.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.1985.0>,<0.1986.0>,nil,<<"1352829623147982">>, <0.1982.0>,<0.1987.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.1982.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.1985.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2061.0> terminating ** Last message in was {'EXIT',<0.604.0>,killed} ** When Server state == {db,<0.2061.0>,<0.2062.0>,nil,<<"1352829624436684">>, <0.2058.0>,<0.2063.0>, {db_header,11,0,nil,nil, <<0,0,0,0,16,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2058.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2058.0>, {4142,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:25.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2061.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 556 neighbours: [error_logger:error,2012-11-13T10:01:25.800,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,couch_primary_services} Context: child_terminated Reason: normal Offender: [{pid,<0.2892.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:error,2012-11-13T10:01:29.597,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:error,2012-11-13T10:01:29.667,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_call,3}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [error_logger:error,2012-11-13T10:01:38.585,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.3189.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:error,2012-11-13T10:01:38.786,ns_1@127.0.0.1:ns_heart<0.3117.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} [ns_server:error,2012-11-13T10:01:38.809,ns_1@127.0.0.1:ns_doctor<0.3120.0>:ns_doctor:update_status:205]The following buckets became not ready on node 'ns_1@127.0.0.1': ["default"], those of them are active [] [error_logger:error,2012-11-13T10:01:38.964,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.3090.0>}, 
{name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:error,2012-11-13T10:01:39.088,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.2489.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2489.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.2487.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 377 stack_size: 24 reductions: 691 neighbours: [error_logger:error,2012-11-13T10:01:39.089,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2754.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2754.0>,<0.2755.0>,nil,<<"1352829685736678">>, <0.2751.0>,<0.2756.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2751.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/47">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/47.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.094,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2754.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 439 neighbours: [error_logger:error,2012-11-13T10:01:39.095,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2682.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2682.0>,<0.2683.0>,nil,<<"1352829685721321">>, <0.2679.0>,<0.2684.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2679.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/36">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/36.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2682.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true 
status: running heap_size: 1597 stack_size: 24 reductions: 447 neighbours: [error_logger:error,2012-11-13T10:01:39.100,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2718.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2718.0>,<0.2719.0>,nil,<<"1352829685729409">>, <0.2715.0>,<0.2720.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2715.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/41">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/41.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.106,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2718.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 455 neighbours: [error_logger:error,2012-11-13T10:01:39.107,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2832.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2832.0>,<0.2833.0>,nil,<<"1352829685753066">>, <0.2829.0>,<0.2834.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2829.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/59">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/59.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.112,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2832.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 450 neighbours: [error_logger:error,2012-11-13T10:01:39.113,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2826.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2826.0>,<0.2827.0>,nil,<<"1352829685751685">>, <0.2823.0>,<0.2828.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2823.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/58">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/58.couch.1", [],nil, 
{user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.117,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2826.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 458 neighbours: [error_logger:error,2012-11-13T10:01:39.118,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2856.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2856.0>,<0.2857.0>,nil,<<"1352829685758407">>, <0.2853.0>,<0.2858.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2853.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/62">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/62.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.124,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2856.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 466 neighbours: [error_logger:error,2012-11-13T10:01:39.125,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2808.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2808.0>,<0.2809.0>,nil,<<"1352829685747757">>, <0.2805.0>,<0.2810.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2805.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/55">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/55.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.130,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2808.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 474 neighbours: [error_logger:error,2012-11-13T10:01:39.131,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** 
Generic server <0.2862.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2862.0>,<0.2863.0>,nil,<<"1352829685759790">>, <0.2859.0>,<0.2864.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2859.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/63">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/63.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.135,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2862.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 482 neighbours: [error_logger:error,2012-11-13T10:01:39.136,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2496.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2496.0>,<0.2497.0>,nil,<<"1352829685577250">>, <0.2493.0>,<0.2498.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2493.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2493.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.140,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2814.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2814.0>,<0.2815.0>,nil,<<"1352829685749123">>, <0.2811.0>,<0.2816.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2811.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/56">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/56.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.143,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2496.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 987 stack_size: 24 
reductions: 309 neighbours: [error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2814.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 494 neighbours: [error_logger:error,2012-11-13T10:01:39.145,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2850.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2850.0>,<0.2851.0>,nil,<<"1352829685756921">>, <0.2847.0>,<0.2852.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2847.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/61">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/61.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.148,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2796.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2796.0>,<0.2797.0>,nil,<<"1352829685745381">>, <0.2793.0>,<0.2798.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2793.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/53">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/53.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.152,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2796.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 422 neighbours: [error_logger:error,2012-11-13T10:01:39.154,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2850.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 506 neighbours: [error_logger:error,2012-11-13T10:01:39.156,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2640.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When 
Server state == {db,<0.2640.0>,<0.2641.0>,nil,<<"1352829685711040">>, <0.2637.0>,<0.2642.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2637.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/3">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/3.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.236,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2640.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 426 neighbours: [error_logger:error,2012-11-13T10:01:39.237,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2490.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2490.0>,<0.2491.0>,nil,<<"1352829685334923">>, <0.2487.0>,<0.2492.0>, {db_header,11,1, <<0,0,0,0,11,84,0,0,0,0,0,62,0,0,0,0,1,0,0,0,0, 0,0,0,0,0,11,50>>, <<0,0,0,0,11,146,0,0,0,0,0,60,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.2487.0>, {2900, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,11,50>>, 62}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>, {2962,<<0,0,0,0,1>>,60}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2487.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 1,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.241,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2490.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 436 neighbours: [error_logger:error,2012-11-13T10:01:39.242,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2502.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2502.0>,<0.2503.0>,nil,<<"1352829685683035">>, <0.2499.0>,<0.2504.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2499.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/0">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/0.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed 
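
Alongside the killed couch_db servers, the entries above also show the second failure mode of this shutdown: callers racing the supervision tree. ns_heart's gen_server:call(samples_loader_tasks, get_tasks, 2000) and couch_file's call to couch_file_write_guard both exit with {noproc, ...} because the registered name is already gone. gen_server:call/3 exits rather than returns on failure, so the usual guard is a try/catch on the exit reason. A sketch in that spirit (module and function names are hypothetical, not part of Couchbase) turns the exit into a value a heartbeat-style loop can tolerate:

    %% Hypothetical wrapper around gen_server:call/3 that converts the
    %% {noproc, ...} and {timeout, ...} exits seen in the reports above
    %% into tagged return values instead of caller crashes.
    -module(safe_call).
    -export([call/3]).

    call(Name, Request, Timeout) ->
        try
            {ok, gen_server:call(Name, Request, Timeout)}
        catch
            %% Name is no longer registered, e.g. during the
            %% supervision-tree shutdown logged above.
            exit:{noproc, _} ->
                {error, noproc};
            %% The server is alive but did not answer within Timeout.
            exit:{timeout, _} ->
                {error, timeout}
        end.

With this wrapper, safe_call:call(samples_loader_tasks, get_tasks, 2000) would return {error, noproc} while the tree is restarting, rather than propagating the exit the way the ns_heart reports show.
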
[error_logger:error,2012-11-13T10:01:39.245,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2502.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 554 neighbours: [error_logger:error,2012-11-13T10:01:39.246,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2712.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2712.0>,<0.2713.0>,nil,<<"1352829685727974">>, <0.2709.0>,<0.2714.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2709.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/40">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/40.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.249,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2712.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 575 neighbours: [error_logger:error,2012-11-13T10:01:39.250,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2586.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2586.0>,<0.2587.0>,nil,<<"1352829685699966">>, <0.2583.0>,<0.2588.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2583.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/21">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/21.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.253,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2586.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 570 neighbours: [error_logger:error,2012-11-13T10:01:39.254,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2784.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == 
{db,<0.2784.0>,<0.2785.0>,nil,<<"1352829685742942">>, <0.2781.0>,<0.2786.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2781.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/51">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/51.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.257,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2784.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 578 neighbours: [error_logger:error,2012-11-13T10:01:39.258,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2874.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2874.0>,<0.2875.0>,nil,<<"1352829685762498">>, <0.2871.0>,<0.2876.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2871.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/8">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/8.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2874.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 586 neighbours: [error_logger:error,2012-11-13T10:01:39.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2616.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2616.0>,<0.2617.0>,nil,<<"1352829685706449">>, <0.2613.0>,<0.2618.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2613.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/26">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/26.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: 
couch_db:init/1 pid: <0.2616.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 594 neighbours: [error_logger:error,2012-11-13T10:01:39.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2634.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2634.0>,<0.2635.0>,nil,<<"1352829685709980">>, <0.2631.0>,<0.2636.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2631.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/29">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/29.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.270,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2634.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 615 neighbours: [error_logger:error,2012-11-13T10:01:39.271,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2670.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2670.0>,<0.2671.0>,nil,<<"1352829685718719">>, <0.2667.0>,<0.2672.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2667.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/34">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/34.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.274,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2670.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 627 neighbours: [error_logger:error,2012-11-13T10:01:39.275,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2610.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2610.0>,<0.2611.0>,nil,<<"1352829685705115">>, <0.2607.0>,<0.2612.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2607.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2607.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/25">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/25.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.278,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2610.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 622 neighbours: [error_logger:error,2012-11-13T10:01:39.279,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2778.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2778.0>,<0.2779.0>,nil,<<"1352829685741611">>, <0.2775.0>,<0.2780.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2775.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/50">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/50.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.283,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2778.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 422 neighbours: [error_logger:error,2012-11-13T10:01:39.285,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2520.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2520.0>,<0.2521.0>,nil,<<"1352829685687483">>, <0.2517.0>,<0.2522.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2517.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/11">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/11.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.288,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2520.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, 
cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 438 neighbours: [error_logger:error,2012-11-13T10:01:39.289,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2568.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2568.0>,<0.2569.0>,nil,<<"1352829685695867">>, <0.2565.0>,<0.2570.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2565.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/19">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/19.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.292,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2568.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 446 neighbours: [error_logger:error,2012-11-13T10:01:39.293,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2688.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2688.0>,<0.2689.0>,nil,<<"1352829685722740">>, <0.2685.0>,<0.2690.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2685.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/37">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/37.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.296,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2688.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 467 neighbours: [error_logger:error,2012-11-13T10:01:39.297,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2550.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2550.0>,<0.2551.0>,nil,<<"1352829685692516">>, <0.2547.0>,<0.2552.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2547.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/16">>, 
"/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/16.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.300,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2550.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 462 neighbours: [error_logger:error,2012-11-13T10:01:39.301,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2742.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2742.0>,<0.2743.0>,nil,<<"1352829685734113">>, <0.2739.0>,<0.2744.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2739.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/45">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/45.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.304,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2742.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 483 neighbours: [error_logger:error,2012-11-13T10:01:39.305,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2700.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2700.0>,<0.2701.0>,nil,<<"1352829685725516">>, <0.2697.0>,<0.2702.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2697.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/39">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/39.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.308,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2700.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 491 neighbours: 
[error_logger:error,2012-11-13T10:01:39.309,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2838.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2838.0>,<0.2839.0>,nil,<<"1352829685754116">>, <0.2835.0>,<0.2840.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2835.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/6">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/6.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2838.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 694 neighbours: [error_logger:error,2012-11-13T10:01:39.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2868.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2868.0>,<0.2869.0>,nil,<<"1352829685761171">>, <0.2865.0>,<0.2870.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2865.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/7">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/7.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.317,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.2885.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.2885.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.2883.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 729 neighbours: [error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2868.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 702 neighbours: [error_logger:error,2012-11-13T10:01:39.319,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** 
Generic server <0.2532.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2532.0>,<0.2533.0>,nil,<<"1352829685689439">>, <0.2529.0>,<0.2534.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2529.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/13">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/13.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.323,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2532.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 426 neighbours: [error_logger:error,2012-11-13T10:01:39.324,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2544.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2544.0>,<0.2545.0>,nil,<<"1352829685691394">>, <0.2541.0>,<0.2546.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2541.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/15">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/15.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.328,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2544.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 434 neighbours: [error_logger:error,2012-11-13T10:01:39.329,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2766.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2766.0>,<0.2767.0>,nil,<<"1352829685738876">>, <0.2763.0>,<0.2768.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2763.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/49">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/49.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed 
[error_logger:error,2012-11-13T10:01:39.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2766.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 442 neighbours: [error_logger:error,2012-11-13T10:01:39.333,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2820.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2820.0>,<0.2821.0>,nil,<<"1352829685750202">>, <0.2817.0>,<0.2822.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2817.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/57">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/57.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.336,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2820.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 450 neighbours: [error_logger:error,2012-11-13T10:01:39.337,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2598.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2598.0>,<0.2599.0>,nil,<<"1352829685702622">>, <0.2595.0>,<0.2600.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2595.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/23">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/23.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.340,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2598.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 458 neighbours: [error_logger:error,2012-11-13T10:01:39.341,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2628.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == 
{db,<0.2628.0>,<0.2629.0>,nil,<<"1352829685708726">>, <0.2625.0>,<0.2630.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2625.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/28">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/28.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.345,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2628.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 479 neighbours: [error_logger:error,2012-11-13T10:01:39.346,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2652.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2652.0>,<0.2653.0>,nil,<<"1352829685713972">>, <0.2649.0>,<0.2654.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2649.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2649.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/31">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/31.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.458,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2652.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 474 neighbours: [error_logger:error,2012-11-13T10:01:39.459,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2646.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2646.0>,<0.2647.0>,nil,<<"1352829685712452">>, <0.2643.0>,<0.2648.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2643.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/30">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/30.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.462,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: 
couch_db:init/1 pid: <0.2646.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 482 neighbours: [error_logger:error,2012-11-13T10:01:39.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2622.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2622.0>,<0.2623.0>,nil,<<"1352829685707601">>, <0.2619.0>,<0.2624.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2619.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/27">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/27.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2622.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 490 neighbours: [error_logger:error,2012-11-13T10:01:39.467,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2802.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2802.0>,<0.2803.0>,nil,<<"1352829685746658">>, <0.2799.0>,<0.2804.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2799.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/54">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/54.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.471,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2802.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 498 neighbours: [error_logger:error,2012-11-13T10:01:39.472,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2514.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2514.0>,<0.2515.0>,nil,<<"1352829685686322">>, <0.2511.0>,<0.2516.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, 
{btree,<0.2511.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2511.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/10">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/10.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.475,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2514.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 506 neighbours: [error_logger:error,2012-11-13T10:01:39.476,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2772.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2772.0>,<0.2773.0>,nil,<<"1352829685740221">>, <0.2769.0>,<0.2774.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2769.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/5">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/5.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.479,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2772.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 514 neighbours: [error_logger:error,2012-11-13T10:01:39.480,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2880.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2880.0>,<0.2881.0>,nil,<<"1352829685763855">>, <0.2877.0>,<0.2882.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2877.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/9">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/9.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.483,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2880.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] 
messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 522 neighbours: [error_logger:error,2012-11-13T10:01:39.484,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2538.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2538.0>,<0.2539.0>,nil,<<"1352829685690418">>, <0.2535.0>,<0.2540.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2535.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/14">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/14.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2538.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 530 neighbours: [error_logger:error,2012-11-13T10:01:39.488,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2508.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2508.0>,<0.2509.0>,nil,<<"1352829685684645">>, <0.2505.0>,<0.2510.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2505.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/1">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/1.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.492,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2508.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 538 neighbours: [error_logger:error,2012-11-13T10:01:39.493,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2844.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2844.0>,<0.2845.0>,nil,<<"1352829685755504">>, <0.2841.0>,<0.2846.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2841.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/60">>, "/Users/farshid/Library/Application 
Support/Couchbase/var/lib/couchdb/default/60.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.496,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2844.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 546 neighbours: [error_logger:error,2012-11-13T10:01:39.497,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2562.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2562.0>,<0.2563.0>,nil,<<"1352829685694660">>, <0.2559.0>,<0.2564.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2559.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/18">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/18.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.500,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2562.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 554 neighbours: [error_logger:error,2012-11-13T10:01:39.501,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2676.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2676.0>,<0.2677.0>,nil,<<"1352829685719901">>, <0.2673.0>,<0.2678.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2673.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/35">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/35.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2676.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 575 neighbours: 
[error_logger:error,2012-11-13T10:01:39.505,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2526.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2526.0>,<0.2527.0>,nil,<<"1352829685688455">>, <0.2523.0>,<0.2528.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2523.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/12">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/12.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.508,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2724.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2724.0>,<0.2725.0>,nil,<<"1352829685730532">>, <0.2721.0>,<0.2726.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2721.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/42">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/42.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.512,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2526.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 570 neighbours: [error_logger:error,2012-11-13T10:01:39.513,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2724.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 587 neighbours: [error_logger:error,2012-11-13T10:01:39.514,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2760.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2760.0>,<0.2761.0>,nil,<<"1352829685737859">>, <0.2757.0>,<0.2762.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2757.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/48">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/48.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for 
termination == ** killed [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2760.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 599 neighbours: [error_logger:error,2012-11-13T10:01:39.518,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2706.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2706.0>,<0.2707.0>,nil,<<"1352829685726622">>, <0.2703.0>,<0.2708.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2703.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/4">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/4.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.522,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2706.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 594 neighbours: [error_logger:error,2012-11-13T10:01:39.523,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2694.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2694.0>,<0.2695.0>,nil,<<"1352829685723978">>, <0.2691.0>,<0.2696.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2691.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/38">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/38.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.526,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2694.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 602 neighbours: [error_logger:error,2012-11-13T10:01:39.527,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2556.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server 
state == {db,<0.2556.0>,<0.2557.0>,nil,<<"1352829685693528">>, <0.2553.0>,<0.2558.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2553.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/17">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/17.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.530,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2556.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 610 neighbours: [error_logger:error,2012-11-13T10:01:39.531,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2748.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2748.0>,<0.2749.0>,nil,<<"1352829685735209">>, <0.2745.0>,<0.2750.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2745.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/46">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/46.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.535,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.2748.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 631 neighbours: [error_logger:error,2012-11-13T10:01:39.536,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2574.0> terminating ** Last message in was {'EXIT',<0.2486.0>,killed} ** When Server state == {db,<0.2574.0>,<0.2575.0>,nil,<<"1352829685697361">>, <0.2571.0>,<0.2576.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2571.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/2">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/2.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2012-11-13T10:01:39.539,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]** Generic server <0.2736.0> terminating ** Last message in was 
{'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2736.0>,<0.2737.0>,nil,<<"1352829685732765">>, <0.2733.0>,<0.2738.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2733.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/44">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/44.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.542,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2574.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 626
  neighbours:
[error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2736.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 643
  neighbours:
[error_logger:error,2012-11-13T10:01:39.544,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2790.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2790.0>,<0.2791.0>,nil,<<"1352829685744125">>, <0.2787.0>,<0.2792.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2787.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/52">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/52.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2790.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 642
  neighbours:
[error_logger:error,2012-11-13T10:01:39.549,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2886.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2886.0>,<0.2887.0>,nil,<<"1352829685765663">>, <0.2883.0>,<0.2888.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true},
{btree,<0.2883.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.2883.0>,nil, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/master">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/master.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.551,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2580.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2580.0>,<0.2581.0>,nil,<<"1352829685698636">>, <0.2577.0>,<0.2582.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2577.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/20">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/20.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.555,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2886.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 610
    stack_size: 24
    reductions: 538
  neighbours:
[error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2580.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 654
  neighbours:
[error_logger:error,2012-11-13T10:01:39.557,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2592.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2592.0>,<0.2593.0>,nil,<<"1352829685701266">>, <0.2589.0>,<0.2594.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2589.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/22">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/22.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.560,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2658.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2658.0>,<0.2659.0>,nil,<<"1352829685715431">>, <0.2655.0>,<0.2660.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun,
#Fun,1279,2558, true}, {btree,<0.2655.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2655.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/32">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/32.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.563,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2658.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 670
  neighbours:
[error_logger:error,2012-11-13T10:01:39.564,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2664.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2664.0>,<0.2665.0>,nil,<<"1352829685717309">>, <0.2661.0>,<0.2666.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2661.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/33">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/33.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2664.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 678
  neighbours:
[error_logger:error,2012-11-13T10:01:39.568,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2604.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2604.0>,<0.2605.0>,nil,<<"1352829685703745">>, <0.2601.0>,<0.2606.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2601.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/24">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/24.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2604.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup,
                cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 686
  neighbours:
[error_logger:error,2012-11-13T10:01:39.574,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2592.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 666
  neighbours:
[error_logger:error,2012-11-13T10:01:39.575,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:76]
** Generic server <0.2730.0> terminating
** Last message in was {'EXIT',<0.2486.0>,killed}
** When Server state == {db,<0.2730.0>,<0.2731.0>,nil,<<"1352829685731704">>, <0.2727.0>,<0.2732.0>, {db_header,11,0,nil,nil, <<0,0,0,0,48,46,0,0,0,0,0,93>>, 0,nil,nil}, 0, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.2727.0>, {12334,<<>>,93}, #Fun, #Fun, #Fun,nil,1279,2558, true}, 0,<<"default/43">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/default/43.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []}
** Reason for termination ==
** killed
[error_logger:error,2012-11-13T10:01:39.580,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: couch_db:init/1
    pid: <0.2730.0>
    registered_name: []
    exception exit: killed in function gen_server:terminate/6
    ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.63.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 1597
    stack_size: 24
    reductions: 711
  neighbours:
[error_logger:error,2012-11-13T10:01:39.860,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,couch_primary_services}
  Context: child_terminated
  Reason: normal
  Offender: [{pid,<0.3714.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}]
[ns_server:error,2012-11-13T10:01:41.885,ns_1@127.0.0.1:ns_heart<0.3913.0>:ns_heart:grab_samples_loading_tasks:319]Failed to grab samples loader tasks: {exit, {noproc, {gen_server,call, [samples_loader_tasks,get_tasks, 2000]}}, [{gen_server,call,3}, {ns_heart,grab_samples_loading_tasks,0}, {ns_heart,current_status,0}, {ns_heart,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}
[ns_server:error,2012-11-13T10:03:12.532,ns_1@127.0.0.1:<0.4284.0>:ns_janitor:cleanup_with_states:92]Bucket "default" not yet ready on ['ns_1@127.0.0.1']
[ns_server:error,2012-11-13T10:03:15.007,ns_1@127.0.0.1:ns_memcached-default<0.4001.0>:ns_memcached:handle_info:630]handle_info(ensure_bucket,..)
took too long: 24090341 us
[stats:error,2012-11-13T10:35:17.447,ns_1@127.0.0.1:<0.10101.0>:stats_reader:log_bad_responses:191]Some nodes didn't respond: ['ns_1@127.0.0.1']
[error_logger:error,2012-11-13T10:35:17.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================SUPERVISOR REPORT=========================
  Supervisor: {local,menelaus_sup}
  Context: child_terminated
  Reason: {noproc, {gen_server,call, [{'stats_reader-beer-sample','ns_1@127.0.0.1'}, {latest,minute,1}]}}
  Offender: [{pid,<0.3962.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]
[error_logger:error,2012-11-13T10:37:13.430,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:72]
=========================CRASH REPORT=========================
  crasher:
    initial call: samples_loader_tasks:perform_loading_task/1
    pid: <0.12893.0>
    registered_name: []
    exception exit: {failed_to_load_samples_with_status,1} in function samples_loader_tasks:perform_loading_task/1
    ancestors: [samples_loader_tasks,ns_server_sup,ns_server_cluster_sup, <0.63.0>]
    messages: []
    links: [<0.4011.0>]
    dictionary: []
    trap_exit: false
    status: running
    heap_size: 28657
    stack_size: 24
    reductions: 30019
  neighbours:
[user:error,2012-11-13T10:37:13.442,ns_1@127.0.0.1:samples_loader_tasks<0.4011.0>:samples_loader_tasks:handle_info:64]Loading sample bucket beer-sample failed: {failed_to_load_samples_with_status, 1}
-------------------------------
logs_node (xdcr):
-------------------------------
[xdcr:info,2012-11-13T9:56:22.257,ns_1@127.0.0.1:ns_server_sup<0.378.0>:xdc_replication_sup:start_link:25]start XDCR bucket replicator supervisor...
[xdcr:info,2012-11-13T9:56:22.299,ns_1@127.0.0.1:ns_server_sup<0.378.0>:xdc_rep_manager:start_link:49]start XDCR replication manager...
[xdcr:info,2012-11-13T9:56:22.613,ns_1@127.0.0.1:xdc_rep_manager<0.479.0>:xdc_rep_manager:maybe_create_replication_info_ddoc:92]replication document created: {db,<0.484.0>,<0.485.0>,nil,<<"1352829382613535">>,<0.480.0>,<0.486.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.480.0>,nil,#Fun, #Fun,#Fun, #Fun,1279,2558,true}, {btree,<0.480.0>,nil,#Fun, #Fun, #Fun, #Fun,1279,2558,true}, {btree,<0.480.0>,nil,#Fun, #Fun,#Fun,nil,1279, 2558,true}, 0,<<"_replicator">>, "/Users/farshid/Library/Application Support/Couchbase/var/lib/couchdb/_replicator.couch.1", [],nil, {user_ctx,null,[<<"_admin">>,<<"_replicator">>],undefined}, nil, [before_header,after_header,on_file_open], [create,sys_db, {user_ctx,{user_ctx,null,[<<"_admin">>,<<"_replicator">>],undefined}}]}
[xdcr:info,2012-11-13T9:56:22.615,ns_1@127.0.0.1:xdc_rep_manager<0.479.0>:xdc_rep_manager:maybe_create_replication_info_ddoc:108]create XDCR replication info doc...
[xdcr:info,2012-11-13T9:56:35.996,ns_1@127.0.0.1:ns_server_sup<0.765.0>:xdc_replication_sup:start_link:25]start XDCR bucket replicator supervisor...
[xdcr:info,2012-11-13T9:56:35.997,ns_1@127.0.0.1:ns_server_sup<0.765.0>:xdc_rep_manager:start_link:49]start XDCR replication manager...
[xdcr:info,2012-11-13T10:01:29.641,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:xdc_replication_sup:start_link:25]start XDCR bucket replicator supervisor...
[xdcr:info,2012-11-13T10:01:29.642,ns_1@127.0.0.1:ns_server_sup<0.3091.0>:xdc_rep_manager:start_link:49]start XDCR replication manager...
[xdcr:info,2012-11-13T10:01:41.912,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:xdc_replication_sup:start_link:25]start XDCR bucket replicator supervisor...
[xdcr:info,2012-11-13T10:01:41.919,ns_1@127.0.0.1:ns_server_sup<0.3887.0>:xdc_rep_manager:start_link:49]start XDCR replication manager...
-------------------------------
logs_node (couchdb):
-------------------------------
-------------------------------
logs_node (views):
-------------------------------
[views:debug,2012-11-13T10:00:19.456,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (0)
[views:debug,2012-11-13T10:00:19.827,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: active (0)
[views:debug,2012-11-13T10:00:19.935,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.106,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.203,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.304,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.404,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.505,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.605,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.717,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.818,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: active (0)
[views:debug,2012-11-13T10:00:20.930,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.044,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.166,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.295,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.487,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.625,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47.
Updated state: active (0)
[views:debug,2012-11-13T10:00:21.747,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.859,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: active (0)
[views:debug,2012-11-13T10:00:21.982,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.094,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.206,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.324,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.430,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.541,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.643,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.754,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.857,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (0)
[views:debug,2012-11-13T10:00:22.947,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.047,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.148,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.249,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.349,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.451,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.564,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29.
Updated state: active (0)
[views:debug,2012-11-13T10:00:23.675,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.787,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (0)
[views:debug,2012-11-13T10:00:23.887,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.011,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.111,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.212,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.314,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.437,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.538,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.650,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.750,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.863,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (0)
[views:debug,2012-11-13T10:00:24.964,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.075,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.181,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.288,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.389,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.534,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11.
Updated state: active (0)
[views:debug,2012-11-13T10:00:25.669,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.860,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (0)
[views:debug,2012-11-13T10:00:25.961,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.107,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.308,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.420,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.532,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.633,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.734,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.846,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (0)
[views:debug,2012-11-13T10:00:26.935,ns_1@127.0.0.1:mc_couch_events<0.836.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: active (0)
[views:debug,2012-11-13T10:01:30.720,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.722,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.725,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.727,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.729,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.732,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.735,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57.
Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.737,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.740,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.743,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.747,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.750,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.753,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.756,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.759,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.762,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.765,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.769,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.772,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.776,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.782,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.786,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.789,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.793,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.797,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39.
Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.800,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.803,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.806,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.809,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.812,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.815,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.817,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.820,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.823,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.829,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.832,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.834,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.837,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.841,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.845,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.848,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.851,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.854,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21.
Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.857,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.860,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.864,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.869,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.872,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.875,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.878,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.883,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.886,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.889,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.894,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.897,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.900,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.903,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.907,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.913,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.918,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.921,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3.
Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.925,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.931,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: dead (1)
[views:debug,2012-11-13T10:01:30.934,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: dead (1)
[views:debug,2012-11-13T10:01:31.634,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.637,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.640,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.642,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.659,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.662,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.665,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.667,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.711,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.783,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.791,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.793,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.796,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.798,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.801,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49.
Updated state: active (1)
[views:debug,2012-11-13T10:01:31.804,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.807,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.810,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.814,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.817,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.820,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.824,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.828,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.831,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.835,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.838,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.841,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.844,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.846,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.850,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.852,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.855,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.865,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31.
Updated state: active (1)
[views:debug,2012-11-13T10:01:31.872,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.874,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.877,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.880,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.884,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.897,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.905,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.907,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.911,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.915,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.919,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.921,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.926,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.929,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.933,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.937,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.941,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.944,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13.
Updated state: active (1)
[views:debug,2012-11-13T10:01:31.949,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.953,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.955,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.958,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.962,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.965,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.967,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.971,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.973,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.976,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.979,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.981,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (1)
[views:debug,2012-11-13T10:01:31.985,ns_1@127.0.0.1:mc_couch_events<0.3168.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: active (1)
[views:debug,2012-11-13T10:01:42.980,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: dead (1)
[views:debug,2012-11-13T10:01:42.984,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: dead (1)
[views:debug,2012-11-13T10:01:42.987,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: dead (1)
[views:debug,2012-11-13T10:01:42.989,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: dead (1)
[views:debug,2012-11-13T10:01:42.994,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59.
Updated state: dead (1)
[views:debug,2012-11-13T10:01:42.998,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.001,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.004,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.007,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.010,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.024,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.026,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.030,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.033,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.036,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.038,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.041,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.044,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.046,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.049,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.052,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.055,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: dead (1)
[views:debug,2012-11-13T10:01:43.059,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41.
Updated state: dead (1) [views:debug,2012-11-13T10:01:43.063,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.066,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.068,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.071,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.074,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.076,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.079,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.083,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.086,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.088,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.091,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.096,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.100,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.103,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.106,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.109,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.112,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.115,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. 
Updated state: dead (1) [views:debug,2012-11-13T10:01:43.118,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.120,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.123,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.126,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.129,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.131,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.134,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.137,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.140,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.144,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.148,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.151,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.154,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.156,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.159,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.162,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.166,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.169,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. 
Updated state: dead (1) [views:debug,2012-11-13T10:01:43.172,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.175,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.178,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.184,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.188,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: dead (1) [views:debug,2012-11-13T10:01:43.919,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/63. Updated state: active (1) [views:debug,2012-11-13T10:01:43.922,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/62. Updated state: active (1) [views:debug,2012-11-13T10:01:43.924,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/61. Updated state: active (1) [views:debug,2012-11-13T10:01:43.927,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/60. Updated state: active (1) [views:debug,2012-11-13T10:01:43.931,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/59. Updated state: active (1) [views:debug,2012-11-13T10:01:43.935,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/58. Updated state: active (1) [views:debug,2012-11-13T10:01:43.939,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/57. Updated state: active (1) [views:debug,2012-11-13T10:01:43.943,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/56. Updated state: active (1) [views:debug,2012-11-13T10:01:43.947,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/55. Updated state: active (1) [views:debug,2012-11-13T10:01:43.950,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/54. Updated state: active (1) [views:debug,2012-11-13T10:01:43.954,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/53. Updated state: active (1) [views:debug,2012-11-13T10:01:43.959,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/52. Updated state: active (1) [views:debug,2012-11-13T10:01:43.963,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/51. 
Updated state: active (1) [views:debug,2012-11-13T10:01:43.966,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/50. Updated state: active (1) [views:debug,2012-11-13T10:01:44.034,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/49. Updated state: active (1) [views:debug,2012-11-13T10:01:44.118,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/48. Updated state: active (1) [views:debug,2012-11-13T10:01:44.127,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/47. Updated state: active (1) [views:debug,2012-11-13T10:01:44.130,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/46. Updated state: active (1) [views:debug,2012-11-13T10:01:44.133,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/45. Updated state: active (1) [views:debug,2012-11-13T10:01:44.136,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/44. Updated state: active (1) [views:debug,2012-11-13T10:01:44.140,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/43. Updated state: active (1) [views:debug,2012-11-13T10:01:44.143,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/42. Updated state: active (1) [views:debug,2012-11-13T10:01:44.146,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/41. Updated state: active (1) [views:debug,2012-11-13T10:01:44.149,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/40. Updated state: active (1) [views:debug,2012-11-13T10:01:44.152,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/39. Updated state: active (1) [views:debug,2012-11-13T10:01:44.154,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/38. Updated state: active (1) [views:debug,2012-11-13T10:01:44.157,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/37. Updated state: active (1) [views:debug,2012-11-13T10:01:44.160,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/36. Updated state: active (1) [views:debug,2012-11-13T10:01:44.163,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/35. Updated state: active (1) [views:debug,2012-11-13T10:01:44.171,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/34. Updated state: active (1) [views:debug,2012-11-13T10:01:44.174,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/33. 
Updated state: active (1) [views:debug,2012-11-13T10:01:44.178,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/32. Updated state: active (1) [views:debug,2012-11-13T10:01:44.181,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/31. Updated state: active (1) [views:debug,2012-11-13T10:01:44.184,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/30. Updated state: active (1) [views:debug,2012-11-13T10:01:44.193,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/29. Updated state: active (1) [views:debug,2012-11-13T10:01:44.204,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/28. Updated state: active (1) [views:debug,2012-11-13T10:01:44.216,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/27. Updated state: active (1) [views:debug,2012-11-13T10:01:44.225,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/26. Updated state: active (1) [views:debug,2012-11-13T10:01:44.228,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/25. Updated state: active (1) [views:debug,2012-11-13T10:01:44.238,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/24. Updated state: active (1) [views:debug,2012-11-13T10:01:44.249,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/23. Updated state: active (1) [views:debug,2012-11-13T10:01:44.252,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/22. Updated state: active (1) [views:debug,2012-11-13T10:01:44.256,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/21. Updated state: active (1) [views:debug,2012-11-13T10:01:44.260,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/20. Updated state: active (1) [views:debug,2012-11-13T10:01:44.264,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/19. Updated state: active (1) [views:debug,2012-11-13T10:01:44.269,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/18. Updated state: active (1) [views:debug,2012-11-13T10:01:44.273,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/17. Updated state: active (1) [views:debug,2012-11-13T10:01:44.276,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/16. Updated state: active (1) [views:debug,2012-11-13T10:01:44.280,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/15. 
Updated state: active (1) [views:debug,2012-11-13T10:01:44.283,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/14. Updated state: active (1) [views:debug,2012-11-13T10:01:44.285,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/13. Updated state: active (1) [views:debug,2012-11-13T10:01:44.289,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/12. Updated state: active (1) [views:debug,2012-11-13T10:01:44.292,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/11. Updated state: active (1) [views:debug,2012-11-13T10:01:44.298,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/10. Updated state: active (1) [views:debug,2012-11-13T10:01:44.301,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/9. Updated state: active (1) [views:debug,2012-11-13T10:01:44.305,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/8. Updated state: active (1) [views:debug,2012-11-13T10:01:44.308,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/7. Updated state: active (1) [views:debug,2012-11-13T10:01:44.314,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/6. Updated state: active (1) [views:debug,2012-11-13T10:01:44.317,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/5. Updated state: active (1) [views:debug,2012-11-13T10:01:44.321,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/4. Updated state: active (1) [views:debug,2012-11-13T10:01:44.324,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/3. Updated state: active (1) [views:debug,2012-11-13T10:01:44.327,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/2. Updated state: active (1) [views:debug,2012-11-13T10:01:44.330,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/1. Updated state: active (1) [views:debug,2012-11-13T10:01:44.333,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for default/0. Updated state: active (1) [views:debug,2012-11-13T10:35:18.681,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/63. Updated state: active (0) [views:debug,2012-11-13T10:35:19.077,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/62. Updated state: active (0) [views:debug,2012-11-13T10:35:19.346,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/61. 
Updated state: active (0) [views:debug,2012-11-13T10:35:19.592,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/60. Updated state: active (0) [views:debug,2012-11-13T10:35:19.694,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/59. Updated state: active (0) [views:debug,2012-11-13T10:35:19.828,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/58. Updated state: active (0) [views:debug,2012-11-13T10:35:19.929,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/57. Updated state: active (0) [views:debug,2012-11-13T10:35:20.029,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/56. Updated state: active (0) [views:debug,2012-11-13T10:35:20.142,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/55. Updated state: active (0) [views:debug,2012-11-13T10:35:20.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/54. Updated state: active (0) [views:debug,2012-11-13T10:35:20.466,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/53. Updated state: active (0) [views:debug,2012-11-13T10:35:20.607,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/52. Updated state: active (0) [views:debug,2012-11-13T10:35:20.764,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/51. Updated state: active (0) [views:debug,2012-11-13T10:35:20.963,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/50. Updated state: active (0) [views:debug,2012-11-13T10:35:21.134,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/49. Updated state: active (0) [views:debug,2012-11-13T10:35:21.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/48. Updated state: active (0) [views:debug,2012-11-13T10:35:21.518,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/47. Updated state: active (0) [views:debug,2012-11-13T10:35:21.697,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/46. Updated state: active (0) [views:debug,2012-11-13T10:35:21.832,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/45. Updated state: active (0) [views:debug,2012-11-13T10:35:21.989,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/44. Updated state: active (0) [views:debug,2012-11-13T10:35:22.241,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/43. 
Updated state: active (0) [views:debug,2012-11-13T10:35:22.401,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/42. Updated state: active (0) [views:debug,2012-11-13T10:35:22.525,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/41. Updated state: active (0) [views:debug,2012-11-13T10:35:22.652,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/40. Updated state: active (0) [views:debug,2012-11-13T10:35:22.760,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/39. Updated state: active (0) [views:debug,2012-11-13T10:35:22.871,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/38. Updated state: active (0) [views:debug,2012-11-13T10:35:23.018,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/37. Updated state: active (0) [views:debug,2012-11-13T10:35:23.131,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/36. Updated state: active (0) [views:debug,2012-11-13T10:35:23.239,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/35. Updated state: active (0) [views:debug,2012-11-13T10:35:23.391,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/34. Updated state: active (0) [views:debug,2012-11-13T10:35:23.490,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/33. Updated state: active (0) [views:debug,2012-11-13T10:35:23.685,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/32. Updated state: active (0) [views:debug,2012-11-13T10:35:23.801,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/31. Updated state: active (0) [views:debug,2012-11-13T10:35:23.949,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/30. Updated state: active (0) [views:debug,2012-11-13T10:35:24.099,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/29. Updated state: active (0) [views:debug,2012-11-13T10:35:24.309,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/28. Updated state: active (0) [views:debug,2012-11-13T10:35:24.496,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/27. Updated state: active (0) [views:debug,2012-11-13T10:35:24.666,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/26. Updated state: active (0) [views:debug,2012-11-13T10:35:24.849,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/25. 
Updated state: active (0) [views:debug,2012-11-13T10:35:25.010,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/24. Updated state: active (0) [views:debug,2012-11-13T10:35:25.167,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/23. Updated state: active (0) [views:debug,2012-11-13T10:35:25.312,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/22. Updated state: active (0) [views:debug,2012-11-13T10:35:25.437,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/21. Updated state: active (0) [views:debug,2012-11-13T10:35:25.591,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/20. Updated state: active (0) [views:debug,2012-11-13T10:35:25.751,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/19. Updated state: active (0) [views:debug,2012-11-13T10:35:25.873,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/18. Updated state: active (0) [views:debug,2012-11-13T10:35:26.055,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/17. Updated state: active (0) [views:debug,2012-11-13T10:35:26.245,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/16. Updated state: active (0) [views:debug,2012-11-13T10:35:26.548,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/15. Updated state: active (0) [views:debug,2012-11-13T10:35:26.788,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/14. Updated state: active (0) [views:debug,2012-11-13T10:35:27.168,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/13. Updated state: active (0) [views:debug,2012-11-13T10:35:27.825,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/12. Updated state: active (0) [views:debug,2012-11-13T10:35:28.303,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/11. Updated state: active (0) [views:debug,2012-11-13T10:35:28.645,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/10. Updated state: active (0) [views:debug,2012-11-13T10:35:29.263,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/9. Updated state: active (0) [views:debug,2012-11-13T10:35:29.867,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/8. Updated state: active (0) [views:debug,2012-11-13T10:35:30.247,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/7. 
Updated state: active (0) [views:debug,2012-11-13T10:35:30.521,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/6. Updated state: active (0) [views:debug,2012-11-13T10:35:30.806,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/5. Updated state: active (0) [views:debug,2012-11-13T10:35:31.142,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/4. Updated state: active (0) [views:debug,2012-11-13T10:35:31.637,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/3. Updated state: active (0) [views:debug,2012-11-13T10:35:32.145,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/2. Updated state: active (0) [views:debug,2012-11-13T10:35:32.821,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/1. Updated state: active (0) [views:debug,2012-11-13T10:35:33.101,ns_1@127.0.0.1:mc_couch_events<0.3964.0>:capi_set_view_manager:handle_mc_couch_event:529]Got set_vbucket event for beer-sample/0. Updated state: active (0) [views:debug,2012-11-13T10:36:06.189,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:maybe_define_group:419] Calling couch_set_view:define_group([<<"beer-sample">>,<<"_design/beer">>, {set_view_params,64,[],[],true}]) [views:debug,2012-11-13T10:36:07.160,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:maybe_define_group:419] couch_set_view:define_group([<<"beer-sample">>,<<"_design/beer">>, {set_view_params,64,[],[],true}]) returned ok in 970ms [views:debug,2012-11-13T10:36:07.173,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:465] Calling couch_set_view:mark_partitions_indexable([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11, 12,13,14,15,16,17,18,19,20, 21,22,23,24,25,26,27,28,29, 30,31,32,33,34,35,36,37,38, 39,40,41,42,43,44,45,46,47, 48,49,50,51,52,53,54,55,56, 57,58,59,60,61,62,63]]) [views:debug,2012-11-13T10:36:07.174,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:466] couch_set_view:mark_partitions_indexable([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14, 15,16,17,18,19,20,21,22,23,24,25, 26,27,28,29,30,31,32,33,34,35,36, 37,38,39,40,41,42,43,44,45,46,47, 48,49,50,51,52,53,54,55,56,57,58, 59,60,61,62,63]]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.174,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:471] Calling couch_set_view:set_partition_states([<<"beer-sample">>, <<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13, 14,15,16,17,18,19,20,21,22,23, 24,25,26,27,28,29,30,31,32,33, 34,35,36,37,38,39,40,41,42,43, 44,45,46,47,48,49,50,51,52,53, 54,55,56,57,58,59,60,61,62,63], [],[]]) [views:debug,2012-11-13T10:36:07.276,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:472] couch_set_view:set_partition_states([<<"beer-sample">>,<<"_design/beer">>, [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, 16,17,18,19,20,21,22,23,24,25,26,27,28, 29,30,31,32,33,34,35,36,37,38,39,40,41, 42,43,44,45,46,47,48,49,50,51,52,53,54, 
55,56,57,58,59,60,61,62,63], [],[]]) returned ok in 101ms [views:debug,2012-11-13T10:36:07.277,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:476] Calling couch_set_view:add_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.277,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:477] couch_set_view:add_replica_partitions([<<"beer-sample">>,<<"_design/beer">>, []]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:478] Calling couch_set_view:remove_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:479] couch_set_view:remove_replica_partitions([<<"beer-sample">>, <<"_design/beer">>,[]]) returned ok in 0ms [views:debug,2012-11-13T10:36:07.278,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:488] Calling couch_set_view:mark_partitions_unindexable([<<"beer-sample">>, <<"_design/beer">>,[]]) [views:debug,2012-11-13T10:36:07.279,ns_1@127.0.0.1:capi_set_view_manager-beer-sample<0.12901.0>:capi_set_view_manager:apply_index_states:489] couch_set_view:mark_partitions_unindexable([<<"beer-sample">>, <<"_design/beer">>,[]]) returned ok in 0ms ------------------------------- logs_node (mapreduce_errors): ------------------------------- ------------------------------- memcached logs: ------------------------------- Tue Nov 13 10:00:18.225294 PST 3: Trying to connect to mccouch: "localhost:11213" Tue Nov 13 10:00:18.226492 PST 3: Connected to mccouch: "localhost:11213" Tue Nov 13 10:00:18.273915 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear" Tue Nov 13 10:00:18.274020 PST 3: Failed to load mutation log, falling back to key dump Tue Nov 13 10:00:18.274063 PST 3: Extension support isn't implemented in this version of bucket_engine Tue Nov 13 10:00:18.274235 PST 3: metadata loaded in 24 ms Tue Nov 13 10:00:18.299836 PST 3: warmup completed in 24 ms Tue Nov 13 10:01:24.491710 PST 3: Shutting down tap connections! Tue Nov 13 10:01:24.492891 PST 3: Had to wait 1081 usec for shutdown Tue Nov 13 10:01:30.690386 PST 3: Trying to connect to mccouch: "localhost:11213" Tue Nov 13 10:01:30.691887 PST 3: Connected to mccouch: "localhost:11213" Tue Nov 13 10:01:30.701758 PST 3: Extension support isn't implemented in this version of bucket_engine Tue Nov 13 10:01:30.708314 PST 3: Failed to load mutation log, falling back to key dump Tue Nov 13 10:01:30.714911 PST 3: metadata loaded in 21 ms Tue Nov 13 10:01:30.718107 PST 3: warmup completed in 24 ms Tue Nov 13 10:01:38.822590 PST 3: Shutting down tap connections! Tue Nov 13 10:01:38.823826 PST 3: Had to wait 1154 usec for shutdown Tue Nov 13 10:01:42.951086 PST 3: Trying to connect to mccouch: "localhost:11213" Tue Nov 13 10:01:42.952552 PST 3: Connected to mccouch: "localhost:11213" Tue Nov 13 10:01:42.960960 PST 3: Extension support isn't implemented in this version of bucket_engine Tue Nov 13 10:01:42.965552 PST 3: Failed to load mutation log, falling back to key dump Tue Nov 13 10:01:42.972721 PST 3: metadata loaded in 18 ms Tue Nov 13 10:01:42.976599 PST 3: warmup completed in 21 ms Tue Nov 13 10:27:34.334628 PST 3: Shutting down tap connections! 
Tue Nov 13 10:35:17.912626 PST 3: Trying to connect to mccouch: "localhost:11213"
Tue Nov 13 10:35:17.941540 PST 3: Connected to mccouch: "localhost:11213"
Tue Nov 13 10:35:17.991827 PST 3: Warning: failed to load the engine session stats due to IO exception "basic_ios::clear"
Tue Nov 13 10:35:17.992033 PST 3: Failed to load mutation log, falling back to key dump
Tue Nov 13 10:35:17.992232 PST 3: Extension support isn't implemented in this version of bucket_engine
Tue Nov 13 10:35:17.992439 PST 3: metadata loaded in 14 ms
Tue Nov 13 10:35:17.992989 PST 3: warmup completed in 14 ms
Tue Nov 13 10:36:43.366118 PST 3: Shutting down tap connections!
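
Note: the apply_index_states trace above reduces to the couch_set_view call sequence sketched below. This is a minimal sketch derived only from the argument lists and return values recorded in the log; it assumes evaluation inside the Couchbase server's Erlang VM (where the couch_set_view application is loaded), and the module name define_beer_group is hypothetical.

%% Sketch of the couch_set_view call sequence recorded in the views:debug
%% trace above. Assumes it runs inside the Couchbase server's Erlang VM;
%% the module name is made up for illustration.
-module(define_beer_group).
-export([run/0]).

run() ->
    SetName = <<"beer-sample">>,
    DDocId  = <<"_design/beer">>,
    Parts   = lists:seq(0, 63),  % all 64 vbuckets of the bucket
    %% {set_view_params, NumPartitions, Active, Passive, UseReplicaIndex},
    %% copied verbatim from the maybe_define_group log line.
    ok = couch_set_view:define_group(SetName, DDocId,
                                     {set_view_params, 64, [], [], true}),
    ok = couch_set_view:mark_partitions_indexable(SetName, DDocId, Parts),
    %% Active = all partitions, Passive = [], Cleanup = [] (as logged).
    ok = couch_set_view:set_partition_states(SetName, DDocId, Parts, [], []),
    ok = couch_set_view:add_replica_partitions(SetName, DDocId, []),
    ok = couch_set_view:remove_replica_partitions(SetName, DDocId, []),
    ok = couch_set_view:mark_partitions_unindexable(SetName, DDocId, []).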