add a simple page navigation for sphinx_search

This commit is contained in:
Kevin Lynx 2013-07-31 20:56:48 +08:00
parent e1c905b0a7
commit 1d27f2416b
4 changed files with 53 additions and 19 deletions

View File

@@ -6,7 +6,7 @@
-module(db_frontend). -module(db_frontend).
-export([start/3, -export([start/3,
search/1, search/1,
search_by_sphinx/1, search_by_sphinx/3,
today_top/0, today_top/0,
search_one/1, search_one/1,
stats/0, stats/0,
@@ -28,9 +28,10 @@ search(Keyword) ->
Conn = mongo_pool:get(?DB_POOLNAME), Conn = mongo_pool:get(?DB_POOLNAME),
db_store_mongo:search(Conn, Keyword). db_store_mongo:search(Conn, Keyword).
search_by_sphinx(Keyword) -> search_by_sphinx(Keyword, Page, Count) ->
Offset = Page * Count,
Conn = mongo_pool:get(?DB_POOLNAME), Conn = mongo_pool:get(?DB_POOLNAME),
sphinx_search:search(Conn, Keyword). sphinx_search:search(Conn, Keyword, Offset, Count).
today_top() -> today_top() ->
Conn = mongo_pool:get(?DB_POOLNAME), Conn = mongo_pool:get(?DB_POOLNAME),

View File

@@ -5,6 +5,7 @@
%% %%
-module(http_common). -module(http_common).
-export([get_search_keyword/1, -export([get_search_keyword/1,
parse_args/1,
get_view_hash/1, get_view_hash/1,
remote_addr/1, remote_addr/1,
list_to_utf_binary/1, list_to_utf_binary/1,
@@ -22,8 +23,7 @@ get_view_hash(Input) ->
get_q_arg(Input). get_q_arg(Input).
get_q_arg(Input) -> get_q_arg(Input) ->
D = urldecode:decode(Input), ReqList = parse_args(Input),
ReqList = httpd:parse_query(D),
case proplists:get_value("q", ReqList) of case proplists:get_value("q", ReqList) of
undefined -> undefined ->
""; "";
@@ -31,6 +31,10 @@ get_q_arg(Input) ->
Arg Arg
end. end.
parse_args(Input) ->
D = urldecode:decode(Input),
httpd:parse_query(D).
sort_file_by_size(Files) -> sort_file_by_size(Files) ->
lists:sort(fun({_, L1}, {_, L2}) -> lists:sort(fun({_, L1}, {_, L2}) ->
L1 > L2 L1 > L2

View File

@@ -16,6 +16,7 @@
-define(TEXT(Fmt, Args), lists:flatten(io_lib:format(Fmt, Args))). -define(TEXT(Fmt, Args), lists:flatten(io_lib:format(Fmt, Args))).
-import(torrent_file, [size_string/1]). -import(torrent_file, [size_string/1]).
-define(CONTENT_TYPE, "Content-Type: text/html\r\n\r\n"). -define(CONTENT_TYPE, "Content-Type: text/html\r\n\r\n").
-define(COUNT_PER_PAGE, 10).
-include("vlog.hrl"). -include("vlog.hrl").
search(SessionID, Env, Input) -> search(SessionID, Env, Input) ->
@@ -37,7 +38,9 @@ sphinx_search(SessionID, Env, Input) ->
Key -> Key ->
US = http_common:list_to_utf_binary(Key), US = http_common:list_to_utf_binary(Key),
?LOG_STR(?INFO, ?FMT("remote ~p search /~s/", [http_common:remote_addr(Env), US])), ?LOG_STR(?INFO, ?FMT("remote ~p search /~s/", [http_common:remote_addr(Env), US])),
{Key, do_search_sphinx(Key)} Args = http_common:parse_args(Input),
Page = case proplists:get_value("p", Args) of undefined -> 0; Val -> list_to_integer(Val) end,
{Key, do_search_sphinx(Key, Page)}
end, end,
Response = simple_html(K, Body), Response = simple_html(K, Body),
mod_esi:deliver(SessionID, [?CONTENT_TYPE, Response]). mod_esi:deliver(SessionID, [?CONTENT_TYPE, Response]).
@@ -112,11 +115,34 @@ do_search(Keyword) ->
Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]), Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]),
Tip ++ Body. Tip ++ Body.
do_search_sphinx(Keyword) -> do_search_sphinx(Keyword, Page) ->
Rets = db_frontend:search_by_sphinx(Keyword), Rets = db_frontend:search_by_sphinx(Keyword, Page, ?COUNT_PER_PAGE + 1),
BodyList = format_search_result(Rets), ThisPage = lists:sublist(Rets, ?COUNT_PER_PAGE),
BodyList = format_search_result(ThisPage),
Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]), Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]),
Body. Body ++ append_page_nav(Keyword, Page, Rets).
append_page_nav(Key, Page, ThisRet) ->
Nav = case length(ThisRet) of
0 when Page > 0 ->
format_page_nav(Key, Page - 1, "Prev");
0 ->
[];
Size when Page > 0 ->
format_page_nav(Key, Page - 1, "Prev") ++
if Size > ?COUNT_PER_PAGE ->
"|" ++ format_page_nav(Key, Page + 1, "Next");
true -> []
end;
Size ->
if Size > ?COUNT_PER_PAGE -> format_page_nav(Key, Page + 1, "Next");
true -> []
end
end,
"<p class=\"page-nav\">" ++ Nav ++ "</p>".
format_page_nav(Key, Page, Tip) ->
?TEXT("<a href=\"http_handler:sphinx_search?q=~s&p=~p\">~s</a>", [Key, Page, Tip]).
format_search_result(RetList) -> format_search_result(RetList) ->
[format_one_result(Result, false) || Result <- RetList]. [format_one_result(Result, false) || Result <- RetList].

View File

@@ -4,20 +4,23 @@
%% 07.28.2013 %% 07.28.2013
%% %%
-module(sphinx_search). -module(sphinx_search).
-export([init/0, search/2]). -include("vlog.hrl").
-export([search/4]).
-define(PORT, 9312). -define(PORT, 9312).
-define(INDEX, "xml"). -define(INDEX, "xml").
-define(PAGECNT, 10).
init() -> search(Conn, Key, Offset, Count) ->
code:add_path("deps/giza/ebin").
%application:start(giza).
search(Conn, Key) ->
Q1 = giza_query:new(?INDEX, Key), Q1 = giza_query:new(?INDEX, Key),
Q2 = giza_query:port(Q1, ?PORT), Q2 = giza_query:port(Q1, ?PORT),
{ok, Ret} = giza_request:send(Q2), Q3 = giza_query:offset(Q2, Offset),
decode_search_ret(Conn, Ret). Q4 = giza_query:limit(Q3, Count),
case catch giza_request:send(Q4) of
{'EXIT', R} ->
?W(?FMT("sphinx search error ~p", [R])),
[];
{ok, Ret} ->
decode_search_ret(Conn, Ret)
end.
decode_search_ret(Conn, Ret) -> decode_search_ret(Conn, Ret) ->
Hashes = [translate_hash(Item) || Item <- Ret], Hashes = [translate_hash(Item) || Item <- Ret],