Mirror of https://github.com/btdig/dhtcrawler2.git, synced 2025-02-23 21:59:04 +00:00

commit ae9f2f0c8e (parent 46338b02c6)

    change http startup
@@ -12,28 +12,29 @@
          code_change/3,
          terminate/2]).
 -export([start/0,
-         start/3,
+         start/4,
          start/1,
          page_temp/0,
          stop/0]).
 -record(state, {html_temp, httpid}).
 
-% start from command line, erl -run crawler_http start localhost 27017 8000
-start([DBHostS, DBPortS, PortS]) ->
+% start from command line, erl -run crawler_http start localhost 27017 8000 5
+start([DBHostS, DBPortS, PortS, PoolSizeS]) ->
     DBHost = DBHostS,
     DBPort = list_to_integer(DBPortS),
     HttpPort = list_to_integer(PortS),
-    start(DBHost, DBPort, HttpPort).
+    PoolSize = list_to_integer(PoolSizeS),
+    start(DBHost, DBPort, HttpPort, PoolSize).
 
-start(DBHost, DBPort, Port) ->
+start(DBHost, DBPort, Port, PoolSize) ->
     code:add_path("deps/bson/ebin"),
     code:add_path("deps/mongodb/ebin"),
     Apps = [crypto, public_key, ssl, inets, bson, mongodb],
     [application:start(App) || App <- Apps],
-    gen_server:start({local, srv_name()}, ?MODULE, [DBHost, DBPort, Port], []).
+    gen_server:start({local, srv_name()}, ?MODULE, [DBHost, DBPort, Port, PoolSize], []).
 
 start() ->
-    start(localhost, 27017, 8000).
+    start(localhost, 27017, 8000, 5).
 
 stop() ->
     gen_server:cast(srv_name(), stop).
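Note that everything passed via erl -run arrives in start/1 as a list of strings, which is why the port and the new pool-size argument go through list_to_integer/1 before start/4 is called. From an Erlang shell the equivalent direct call would presumably look like the sketch below (the pool size of 10 is only an illustrative value; the string host follows the command-line code path):

    %% from an Erlang shell, after compiling into ebin/ (layout assumed)
    crawler_http:start("localhost", 27017, 8000, 10).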
@@ -44,9 +45,9 @@ page_temp() ->
 srv_name() ->
     crawler_http.
 
-init([DBHost, DBPort, Port]) ->
+init([DBHost, DBPort, Port, PoolSize]) ->
     process_flag(trap_exit, true),
-    db_frontend:start(DBHost, DBPort, 2),
+    db_frontend:start(DBHost, DBPort, PoolSize),
     http_cache:start_link(),
     {ok, Pid} = inets:start(httpd, [
         {modules, [mod_alias, mod_auth, mod_esi, mod_actions,
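The list passed as the third argument to gen_server:start/4 in the previous hunk is handed to init/1 verbatim, which is why both hunks change together: the callback's argument list has to mirror the caller's. A self-contained toy sketch of that mechanism (the module and names here are illustrative, not from the repository):

    -module(pool_demo).
    -behaviour(gen_server).
    -export([start/1, init/1, handle_call/3, handle_cast/2]).

    %% the list given here ...
    start(PoolSize) ->
        gen_server:start({local, ?MODULE}, ?MODULE, [PoolSize], []).

    %% ... arrives here unchanged
    init([PoolSize]) ->
        {ok, #{pool_size => PoolSize}}.

    handle_call(_Req, _From, State) -> {reply, ok, State}.
    handle_cast(_Msg, State) -> {noreply, State}.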
@@ -93,9 +93,12 @@ query(Type, State) ->
     end.
 
 update(Type, #state{cache = Cache} = State) ->
+    Start = now(),
+    io:format("sync update cache ~p start~n", [Type]),
     Ret = do_update(Type),
     Val = {now(), Ret},
-    io:format("sync update cache ~p~n", [Type]),
+    io:format("sync update cache ~p done used ~p ms~n", [Type,
+        timer:now_diff(now(), Start) div 1000]),
     NewCache = gb_trees:enter(Type, Val, Cache),
     case gb_trees:size(NewCache) >= ?CACHE_SIZE of
         true ->
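The added lines time do_update/1 with the now()/timer:now_diff pattern; timer:now_diff/2 returns microseconds, hence the div 1000 to report milliseconds. A generic, shell-runnable sketch of the same pattern (erlang:now/0 still works but is deprecated in newer OTP releases in favour of erlang:monotonic_time/1; the sketch mirrors the code as written):

    %% paste into an Erlang shell: wrap any zero-arity fun and report elapsed ms
    TimeMs = fun(Fun) ->
        Start = now(),
        Ret = Fun(),
        Used = timer:now_diff(now(), Start) div 1000,
        io:format("done, used ~p ms~n", [Used]),
        {Used, Ret}
    end,
    TimeMs(fun() -> timer:sleep(50) end).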
@@ -1,2 +1,2 @@
-erl -pa ebin -noshell -run crawler_http start localhost 27017 8000
+erl -pa ebin -noshell -run crawler_http start localhost 27017 8000 5
 
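The startup script gains the same trailing argument: the fifth value is the pool size handed through to db_frontend:start/3, so raising it presumably gives the HTTP front end more pooled database connections to work with (10 below is only an illustration):

    erl -pa ebin -noshell -run crawler_http start localhost 27017 8000 10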