mirror of https://github.com/btdig/dhtcrawler2.git (synced 2025-01-31 10:31:37 +00:00)

commit cb914fe609 (parent 2a9f99940a)

    system stats adjust, add more stats to http front-end
@@ -10,8 +10,8 @@
 -export([stats_new_saved/1,
     stats_updated/1,
     stats_query_inserted/2,
-    stats_day_at/2,
     stats_day_at_slave/2,
+    stats_filtered/1,
     stats_get_peers/1]).
 -export([get_torrent_id/1]).
 -compile(export_all).
@@ -93,38 +93,27 @@ stats_query_inserted(Conn, Count) ->
 stats_cache_query_inserted(Conn, Count) ->
     stats_inc_field(Conn, inserted_query, Count).
 
+stats_filtered(Conn) ->
+    stats_inc_field(Conn, filter_hash).
+
 stats_inc_field(Conn, Filed) ->
     stats_inc_field(Conn, Filed, 1).
 
-stats_inc_field(Conn, Filed, Inc) ->
+stats_inc_field(Conn, Field, Inc) ->
     TodaySecs = time_util:now_day_seconds(),
-    mongo:do(unsafe, master, Conn, ?DBNAME, fun() ->
-        Doc = stats_ensure_today(TodaySecs),
-        {Val} = bson:lookup(Filed, Doc),
-        NewDoc = bson:update(Filed, Val + Inc, Doc),
-        mongo:update(?STATS_COLLNAME, {'_id', TodaySecs}, NewDoc)
-    end).
-
-stats_day_at(Conn, DaySec) ->
+    Cmd = {findAndModify, ?STATS_COLLNAME, query, {'_id', TodaySecs},
+        upsert, true, update, {'$inc', {Field, Inc}}, field, {'_id', 1}},
     mongo:do(safe, master, Conn, ?DBNAME, fun() ->
-        stats_ensure_today(DaySec)
+        mongo:command(Cmd)
     end).
 
 stats_day_at_slave(Conn, DaySec) ->
-    mongo:do(safe, slave_ok, Conn, ?DBNAME, fun() ->
-        stats_ensure_today(DaySec)
-    end).
-
-stats_ensure_today(TodaySecs) ->
-    case mongo:find_one(?STATS_COLLNAME, {'_id', TodaySecs}) of
-        {} ->
-            NewDoc = {'_id', TodaySecs, get_peers, 0, get_peers_query, 0,
-                inserted_query, 0, % because has_cache_writer will merge some queries
-                updated, 0, new_saved, 0},
-            mongo:insert(?STATS_COLLNAME, NewDoc),
-            NewDoc;
-        {Doc} ->
-            Doc
-    end.
+    Ret = mongo:do(safe, slave_ok, Conn, ?DBNAME, fun() ->
+        mongo:find_one(?STATS_COLLNAME, {'_id', DaySec})
+    end),
+    case Ret of
+        {} -> {};
+        {Doc} -> Doc
+    end.
 
 %%
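The stats_inc_field/3 rewrite above is the core of the commit: the old path read today's document (stats_ensure_today), bumped the field client-side with bson:lookup/bson:update, and wrote it back under unsafe write concern, so two concurrent hash readers could lose increments. findAndModify with upsert true makes the increment a single server-side operation under safe write concern and also creates the day's document on first touch, which is why stats_day_at/2 and stats_ensure_today/1 could be deleted. Below is a minimal standalone sketch of the same pattern, using only driver calls that already appear in this diff (mongo:do/5, mongo:command/1); the module name and the db/collection atoms are illustrative stand-ins for ?DBNAME and ?STATS_COLLNAME, not names from the repo.

-module(stats_inc_sketch).
-export([inc/4]).

%% Atomically add Inc to Field in the per-day stats document keyed by
%% DaySecs (seconds at the day boundary). upsert true inserts the
%% document if it does not exist yet; '$inc' is applied server-side,
%% so concurrent writers cannot overwrite each other the way
%% lookup-then-update could.
inc(Conn, DaySecs, Field, Inc) ->
    Cmd = {findAndModify, system_stats, query, {'_id', DaySecs},
        upsert, true, update, {'$inc', {Field, Inc}},
        field, {'_id', 1}},
    mongo:do(safe, master, Conn, dht_system, fun() ->
        mongo:command(Cmd)
    end).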
@@ -104,8 +104,9 @@ schedule_download(Conn, Pid, Hash) ->
     end,
     try_download(Down, Conn, Pid, Hash).
 
-try_download(false, _, _, Hash) ->
+try_download(false, Conn, _, Hash) ->
     ?T(?FMT("hash does not exist in index_cache, filter it ~s", [Hash])),
+    db_system:stats_filtered(Conn),
     hash_reader_stats:handle_cache_filtered(),
     0;
 try_download(true, Conn, Pid, Hash) ->
@@ -126,9 +126,11 @@ do_format_stats([First|Rest]) ->
     S = [format_stats(First)] ++ ["," ++ format_stats(Stats) || Stats <- Rest],
     lists:flatten(S).
 
-format_stats({DaySec, Processed, RecvQuery, Updated, New}) ->
-    ?TEXT("{\"day_secs\":~p, \"recv\":~p, \"process\":~p, \"update\":~p, \"new\":~p}",
-        [DaySec, RecvQuery, Processed, Updated, New]).
+format_stats(Stats) ->
+    Vals = http_common:stats_to_list(Stats),
+    ?TEXT("{\"day_secs\":~p, \"recv\":~p, \"process\":~p, \"update\":~p, \"new\":~p,
+        \"unique\":~p, \"filtered\":~p}",
+        Vals).
 
 %%
 test_search(Keyword) ->
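format_stats/1 above now pulls all seven counters through http_common:stats_to_list/1 (added further down) instead of destructuring a fixed five-field tuple. Given that function's field order ('_id', get_peers_query, get_peers, updated, new_saved, inserted_query, filter_hash), one emitted JSON object would look like the line below; every value is invented for illustration, and "unique" carries inserted_query, the query count left after the cache writer merges duplicates (per the inline comment deleted from db_system above).

{"day_secs":1364169600, "recv":3400, "process":1200, "update":80, "new":40, "unique":2100, "filtered":7}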
@@ -54,15 +54,7 @@ stats() ->
     D1 = db_system:stats_day_at_slave(Conn, DaySecs),
     D2 = db_system:stats_day_at_slave(Conn, DaySecs - ?ONEDAY_SECS),
     D3 = db_system:stats_day_at_slave(Conn, DaySecs - 2 * ?ONEDAY_SECS),
-    {TorSum, [decode_stats(D1), decode_stats(D2), decode_stats(D3)]}.
-
-decode_stats(Stats) ->
-    {DaySec} = bson:lookup('_id', Stats),
-    {Processed} = bson:lookup(get_peers, Stats),
-    {RecvQuery} = bson:lookup(get_peers_query, Stats),
-    {Updated} = bson:lookup(updated, Stats),
-    {New} = bson:lookup(new_saved, Stats),
-    {DaySec, Processed, RecvQuery, Updated, New}.
+    {TorSum, [D1, D2, D3]}.
 
 % test only
 all_top() ->
@@ -8,6 +8,8 @@
     get_view_hash/1,
     remote_addr/1,
     list_to_utf_binary/1,
+    lookup_stats_item/2,
+    stats_to_list/1,
     sort_file_by_size/1]).
 
 remote_addr(Env) ->
@@ -42,4 +44,13 @@ list_to_utf_binary(L) ->
     US = unicode:characters_to_binary(UL),
     US.
 
+lookup_stats_item(Stats, Field) ->
+    case bson:lookup(Field, Stats) of
+        {} -> 0;
+        {Val} -> Val
+    end.
 
+stats_to_list(Stats) ->
+    Fileds = ['_id', get_peers_query, get_peers, updated, new_saved,
+        inserted_query, filter_hash],
+    [lookup_stats_item(Stats, F) || F <- Fileds].
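A shell-style example of the two helpers just added; the stats document is hand-built in the driver's flat bson tuple format with invented values, and filter_hash is deliberately left out to show lookup_stats_item/2 defaulting a missing field to 0 (day documents written before this commit predate the new counters).

1> Doc = {'_id', 1364169600, get_peers, 1200, get_peers_query, 3400,
          updated, 80, new_saved, 40, inserted_query, 2100}.
2> http_common:stats_to_list(Doc).
[1364169600,3400,1200,80,40,2100,0]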
@@ -152,9 +152,11 @@ format_magnet(MagHash) ->
 format_stats([]) ->
     [];
 
-format_stats([{DaySec, Processed, RecvQuery, Updated, New}|Rest]) ->
-    ?TEXT("<li>~s RecvQuery ~p ProcessedQuery ~p Updated ~p New ~p</li>",
-        [format_date_string(DaySec), RecvQuery, Processed, Updated, New]) ++
+format_stats([Stats|Rest]) ->
+    [DaySec|Vals] = http_common:stats_to_list(Stats),
+    ?TEXT("<li>~s RecvQuery ~p ProcessedQuery ~p Updated ~p <b>New ~p</b>
+        UniqueQuery ~p CacheFiltered ~p</li>",
+        [format_date_string(DaySec)|Vals]) ++
     format_stats(Rest).
 
 format_time_string(Secs) ->
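For the same invented values as in the examples above, one rendered list item would read roughly as below; the date is an assumed rendering of format_date_string/1, which is not shown in this diff, and the line break inside the ?TEXT format string is emitted literally.

<li>2013-03-25 RecvQuery 3400 ProcessedQuery 1200 Updated 80 <b>New 40</b>
    UniqueQuery 2100 CacheFiltered 0</li>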