add query stats for new hash_writer

Kevin Lynx 2013-07-21 22:20:47 +08:00
parent e46c264056
commit 980c6cad57
2 changed files with 15 additions and 4 deletions


@@ -95,11 +95,14 @@ do_save_merge(0) ->
 do_save_merge(_) ->
     First = ets:first(?TBLNAME),
     ReqAt = time_util:now_seconds(),
-    do_save(First, ReqAt),
+    {ReqSum, NewSum} = do_save(First, ReqAt),
+    Conn = mongo_pool:get(?DBPOOL),
+    db_system:stats_cache_query_inserted(Conn, NewSum),
+    db_system:stats_query_inserted(Conn, ReqSum),
     ets:delete_all_objects(?TBLNAME).

 do_save('$end_of_table', _) ->
-    0;
+    {0, 0};
 do_save(Key, ReqAt) ->
     Conn = mongo_pool:get(?DBPOOL),
     ReqCnt = get_req_cnt(Key),
@@ -107,11 +110,18 @@ do_save(Key, ReqAt) ->
     Cmd = {findAndModify, ?HASH_COLLNAME, query, {'_id', BHash},
         update, {'$inc', {req_cnt, ReqCnt}, '$set', {req_at, ReqAt}},
         fields, {'_id', 1}, upsert, true, new, false},
-    mongo:do(safe, master, Conn, ?HASH_DBNAME, fun() ->
+    Ret = mongo:do(safe, master, Conn, ?HASH_DBNAME, fun() ->
         mongo:command(Cmd)
     end),
+    New = case Ret of
+        {value, _Obj, lastErrorObject, {updatedExisting, true, n, 1}, ok, 1.0} ->
+            0;
+        _ ->
+            1
+    end,
     Next = ets:next(?TBLNAME, Key),
-    ReqCnt + do_save(Next, ReqAt).
+    {ReqSum, NewSum} = do_save(Next, ReqAt),
+    {ReqCnt + ReqSum, New + NewSum}.

 %% old method
 do_save(0) ->
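Note on the New flag above: with upsert true, findAndModify reports updatedExisting in its lastErrorObject, so a reply that did not update an existing document means the hash was freshly inserted. As a non-authoritative sketch of the same decision written less positionally (count_new_doc/1 is a hypothetical helper; it assumes Ret is the bson reply document returned by mongo:command/1 and uses the bson driver's lookup/2):

%% Sketch only: classify a findAndModify reply as update-of-existing (0)
%% or fresh insert (1), assuming Ret is a bson document as returned above.
count_new_doc(Ret) ->
    case bson:lookup(lastErrorObject, Ret) of
        {ErrObj} ->
            case bson:lookup(updatedExisting, ErrObj) of
                {true} -> 0;   %% an existing hash document was updated
                _ -> 1         %% the upsert inserted a new document
            end;
        {} ->
            1                  %% no lastErrorObject: treat as new
    end.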


@@ -106,6 +106,7 @@ schedule_download(Conn, Pid, Hash) ->
 try_download(false, _, _, Hash) ->
     ?T(?FMT("hash does not exist in index_cache, filter it ~s", [Hash])),
+    hash_reader_stats:handle_cache_filtered(),
     0;
 try_download(true, Conn, Pid, Hash) ->
     case loc_torrent_cache:load(Conn, Hash) of
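hash_reader_stats:handle_cache_filtered/0 is the project's own stats hook and its implementation is not part of this diff. A purely hypothetical sketch of such a filtered-hash counter (module name, storage, and function names are assumptions, not the project's code):

%% Hypothetical counter sketch, not the actual hash_reader_stats module.
-module(stats_sketch).
-export([init/0, handle_cache_filtered/0, cache_filtered/0]).

%% Create a public ETS table holding a single counter.
init() ->
    ets:new(?MODULE, [named_table, public, set]),
    ets:insert(?MODULE, {cache_filtered, 0}),
    ok.

%% Bump the counter each time a hash is filtered by the index cache.
handle_cache_filtered() ->
    ets:update_counter(?MODULE, cache_filtered, 1).

%% Read the current count.
cache_filtered() ->
    [{cache_filtered, N}] = ets:lookup(?MODULE, cache_filtered),
    N.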