diff --git a/src/db_store_mongo.erl b/src/db_store_mongo.erl
index 389d585..c1c50de 100644
--- a/src/db_store_mongo.erl
+++ b/src/db_store_mongo.erl
@@ -25,9 +25,14 @@
 init(Host, Port) ->
 	{ok, Conn} = mongo_connection:start_link({Host, Port}),
 	?I(?FMT("connect mongodb ~p:~p success", [Host, Port])),
+	init(Conn),
+	Conn.
+
+init(Conn) ->
 	enable_text_search(Conn),
 	ensure_search_index(Conn),
-	Conn.
+	% TODO: numid index ?
+	ok.
 
 close(Conn) ->
 	mongo_connection:stop(Conn).
@@ -89,16 +94,15 @@ index(Conn, Hash) when is_list(Hash) ->
 	end.
 
 insert(Conn, Hash, Name, Length, Files) when is_list(Hash) ->
-	NewDoc = create_torrent_desc(Hash, Name, Length, 1, Files),
+	NewDoc = create_torrent_desc(Conn, Hash, Name, Length, 1, Files),
 	mongo_do(Conn, fun() ->
-		%mongo:insert(?COLLNAME, NewDoc)
-		% since the doc may already exist (inc_announce failed), i update the doc here
+		% the doc may already exist because the other process has inserted before
 		Sel = {'_id', list_to_binary(Hash)},
 		mongo:update(?COLLNAME, Sel, NewDoc, true)
 	end).
 
 unsafe_insert(Conn, Tors) when is_list(Tors) ->
-	Docs = [create_torrent_desc(Hash, Name, Length, 1, Files) ||
+	Docs = [create_torrent_desc(Conn, Hash, Name, Length, 1, Files) ||
 		{Hash, Name, Length, Files} <- Tors],
 	mongo:do(unsafe, master, Conn, ?DBNAME, fun() ->
 		mongo:insert(?COLLNAME, Docs)
@@ -135,7 +139,7 @@ enable_text_search(Conn) ->
 		mongo:command(Cmd)
 	end).
 
-create_torrent_desc(Hash, Name, Length, Announce, Files) ->
+create_torrent_desc(Conn, Hash, Name, Length, Announce, Files) ->
 	NameArray = case string_split:split(Name) of
 		{error, L, D} ->
 			?E(?FMT("string split failed(error): ~p ~p", [L, D])),
@@ -146,6 +150,8 @@ create_torrent_desc(Hash, Name, Length, Announce, Files) ->
 		{ok, R} -> R
 	end,
 	{'_id', list_to_binary(Hash),
+	 % steven told me it's necessary for sphinx, what if the doc already exists ?
+	 numid, db_system:get_torrent_id(Conn),
 	 name, list_to_binary(Name),
 	 name_array, NameArray,
 	 length, Length,
diff --git a/src/db_system.erl b/src/db_system.erl
index c728258..56b1710 100644
--- a/src/db_system.erl
+++ b/src/db_system.erl
@@ -13,11 +13,30 @@
 	stats_day_at/2,
 	stats_day_at_slave/2,
 	stats_get_peers/1]).
+-export([get_torrent_id/1]).
+-compile(export_all).
 -define(DBNAME, dht_system).
 -define(COLLNAME, system).
 -define(HASH_BATCH_KEY, <<"hashbatch">>).
 -define(STATS_COLLNAME, stats).
+-define(TORRENT_ID_KEY, <<"torrentid">>).
 
+% increase the seed and return the new id
+get_torrent_id(Conn) ->
+	Cmd = {findAndModify, ?COLLNAME, query, {'_id', ?TORRENT_ID_KEY},
+		update, {'$inc', {seed, 1}}, new, false, upsert, true},
+	Ret = mongo:do(safe, master, Conn, ?DBNAME, fun() ->
+		mongo:command(Cmd)
+	end),
+	case bson:lookup(value, Ret) of
+		{undefined} ->
+			0;
+		{} ->
+			0;
+		{Obj} ->
+			{Seed} = bson:lookup(seed, Obj),
+			Seed
+	end.
 
 %% batch index
 inc_batch_rindex(Conn) ->
@@ -104,4 +123,10 @@ stats_ensure_today(TodaySecs) ->
 			Doc
 	end.
 
 
+%%
+test_torrent_id() ->
+	{ok, Conn} = mongo_connection:start_link({localhost, 27017}),
+	ID = get_torrent_id(Conn),
+	ID.
+
diff --git a/src/hash_reader/db_hash_reader_sup.erl b/src/hash_reader/db_hash_reader_sup.erl
index 0f59aa8..955bed0 100644
--- a/src/hash_reader/db_hash_reader_sup.erl
+++ b/src/hash_reader/db_hash_reader_sup.erl
@@ -45,6 +45,9 @@ start_link(IP, Port, Size) ->
 start_link(IP, Port, Size, OtherProcess) ->
 	PoolName = mongodb_conn_pool_name,
 	mongo_sup:start_pool(PoolName, 5, {IP, Port}),
+	% ensure index
+	Conn = mongo_pool:get(PoolName),
+	db_store_mongo:init(Conn),
 	supervisor:start_link({local, srv_name()}, ?MODULE, [PoolName, Size, OtherProcess]).
 
 srv_name() ->