mirror of https://github.com/btdig/dhtcrawler2.git (synced 2025-02-23)

add a simple page navigation for sphinx_search

commit 1d27f2416b, parent e1c905b0a7
db_frontend.erl
@@ -6,7 +6,7 @@
 -module(db_frontend).
 -export([start/3,
 	search/1,
-	search_by_sphinx/1,
+	search_by_sphinx/3,
 	today_top/0,
 	search_one/1,
 	stats/0,
@@ -28,9 +28,10 @@ search(Keyword) ->
 	Conn = mongo_pool:get(?DB_POOLNAME),
 	db_store_mongo:search(Conn, Keyword).
 
-search_by_sphinx(Keyword) ->
+search_by_sphinx(Keyword, Page, Count) ->
+	Offset = Page * Count,
 	Conn = mongo_pool:get(?DB_POOLNAME),
-	sphinx_search:search(Conn, Keyword).
+	sphinx_search:search(Conn, Keyword, Offset, Count).
 
 today_top() ->
 	Conn = mongo_pool:get(?DB_POOLNAME),
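Note: callers now page explicitly through search_by_sphinx/3, which turns the page
number into a row offset (Offset = Page * Count). A minimal usage sketch, assuming
the mongo pool is already started and using an illustrative keyword:

    %% Fetch page 3 of the sphinx results, 10 hits per page.
    %% Internally this queries searchd with offset 30, limit 10.
    Rets = db_frontend:search_by_sphinx("ubuntu", 3, 10).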
http_common.erl
@@ -5,6 +5,7 @@
 %%
 -module(http_common).
 -export([get_search_keyword/1,
+	parse_args/1,
 	get_view_hash/1,
 	remote_addr/1,
 	list_to_utf_binary/1,
@@ -22,8 +23,7 @@ get_view_hash(Input) ->
 	get_q_arg(Input).
 
 get_q_arg(Input) ->
-	D = urldecode:decode(Input),
-	ReqList = httpd:parse_query(D),
+	ReqList = parse_args(Input),
 	case proplists:get_value("q", ReqList) of
 		undefined ->
 			"";
@@ -31,6 +31,10 @@ get_q_arg(Input) ->
 		Arg
 	end.
 
+parse_args(Input) ->
+	D = urldecode:decode(Input),
+	httpd:parse_query(D).
+
 sort_file_by_size(Files) ->
 	lists:sort(fun({_, L1}, {_, L2}) ->
 		L1 > L2
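Note: the decode-then-parse sequence that used to live inside get_q_arg/1 is now the
exported helper parse_args/1, so the page handler can read other arguments ("p" below)
from the same property list. httpd:parse_query/1 is from OTP's inets application;
urldecode:decode/1 is the project's own decoder. An illustrative shell session (the
query string is made up):

    1> httpd:parse_query("q=ubuntu&p=2").
    [{"q","ubuntu"},{"p","2"}]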
http_handler.erl
@@ -16,6 +16,7 @@
 -define(TEXT(Fmt, Args), lists:flatten(io_lib:format(Fmt, Args))).
 -import(torrent_file, [size_string/1]).
 -define(CONTENT_TYPE, "Content-Type: text/html\r\n\r\n").
+-define(COUNT_PER_PAGE, 10).
 -include("vlog.hrl").
 
 search(SessionID, Env, Input) ->
@@ -37,7 +38,9 @@ sphinx_search(SessionID, Env, Input) ->
 		Key ->
 			US = http_common:list_to_utf_binary(Key),
 			?LOG_STR(?INFO, ?FMT("remote ~p search /~s/", [http_common:remote_addr(Env), US])),
-			{Key, do_search_sphinx(Key)}
+			Args = http_common:parse_args(Input),
+			Page = case proplists:get_value("p", Args) of undefined -> 0; Val -> list_to_integer(Val) end,
+			{Key, do_search_sphinx(Key, Page)}
 	end,
 	Response = simple_html(K, Body),
 	mod_esi:deliver(SessionID, [?CONTENT_TYPE, Response]).
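Note: the "p" argument is optional; when it is absent the handler falls back to page 0,
and a non-numeric value would make list_to_integer/1 exit and abort the request. A
condensed sketch of the lookup with a made-up query string:

    Args = httpd:parse_query("q=ubuntu&p=2"),
    Page = case proplists:get_value("p", Args) of
        undefined -> 0;
        Val -> list_to_integer(Val)
    end.
    %% Page =:= 2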
@@ -112,11 +115,34 @@ do_search(Keyword) ->
 	Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]),
 	Tip ++ Body.
 
-do_search_sphinx(Keyword) ->
-	Rets = db_frontend:search_by_sphinx(Keyword),
-	BodyList = format_search_result(Rets),
+do_search_sphinx(Keyword, Page) ->
+	Rets = db_frontend:search_by_sphinx(Keyword, Page, ?COUNT_PER_PAGE + 1),
+	ThisPage = lists:sublist(Rets, ?COUNT_PER_PAGE),
+	BodyList = format_search_result(ThisPage),
 	Body = ?TEXT("<ol>~s</ol>", [lists:flatten(BodyList)]),
-	Body.
+	Body ++ append_page_nav(Keyword, Page, Rets).
+
+append_page_nav(Key, Page, ThisRet) ->
+	Nav = case length(ThisRet) of
+		0 when Page > 0 ->
+			format_page_nav(Key, Page - 1, "Prev");
+		0 ->
+			[];
+		Size when Page > 0 ->
+			format_page_nav(Key, Page - 1, "Prev") ++
+			if Size > ?COUNT_PER_PAGE ->
+				"|" ++ format_page_nav(Key, Page + 1, "Next");
+			true -> []
+			end;
+		Size ->
+			if Size > ?COUNT_PER_PAGE -> format_page_nav(Key, Page + 1, "Next");
+			true -> []
+			end
+	end,
+	"<p class=\"page-nav\">" ++ Nav ++ "</p>".
+
+format_page_nav(Key, Page, Tip) ->
+	?TEXT("<a href=\"http_handler:sphinx_search?q=~s&p=~p\">~s</a>", [Key, Page, Tip]).
 
 format_search_result(RetList) ->
 	[format_one_result(Result, false) || Result <- RetList].
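Note: do_search_sphinx/2 asks the database layer for ?COUNT_PER_PAGE + 1 rows but
renders only ?COUNT_PER_PAGE of them; the extra row is what tells append_page_nav/3
whether a "Next" link is needed, avoiding a separate count query. A standalone sketch
of the same test (module name and inputs are hypothetical):

    -module(nav_demo).
    -export([links/2]).
    -define(COUNT_PER_PAGE, 10).

    %% Rets was fetched with limit ?COUNT_PER_PAGE + 1.
    links(Page, Rets) ->
        HasPrev = Page > 0,
        HasNext = length(Rets) > ?COUNT_PER_PAGE,
        {HasPrev, HasNext}.

For example, links(0, lists:seq(1, 11)) returns {false, true}: first page, and an
eleventh row came back, so there is a next page.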
sphinx_search.erl
@@ -4,20 +4,23 @@
 %% 07.28.2013
 %%
 -module(sphinx_search).
--export([init/0, search/2]).
+-include("vlog.hrl").
+-export([search/4]).
 -define(PORT, 9312).
 -define(INDEX, "xml").
+-define(PAGECNT, 10).
 
-init() ->
-	code:add_path("deps/giza/ebin").
-	%application:start(giza).
-
-search(Conn, Key) ->
+search(Conn, Key, Offset, Count) ->
 	Q1 = giza_query:new(?INDEX, Key),
 	Q2 = giza_query:port(Q1, ?PORT),
-	{ok, Ret} = giza_request:send(Q2),
-	decode_search_ret(Conn, Ret).
+	Q3 = giza_query:offset(Q2, Offset),
+	Q4 = giza_query:limit(Q3, Count),
+	case catch giza_request:send(Q4) of
+		{'EXIT', R} ->
+			?W(?FMT("sphinx search error ~p", [R])),
+			[];
+		{ok, Ret} ->
+			decode_search_ret(Conn, Ret)
+	end.
 
 decode_search_ret(Conn, Ret) ->
 	Hashes = [translate_hash(Item) || Item <- Ret],
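Note: the catch around giza_request:send/1 makes a failed searchd round-trip degrade to
an empty result list (after logging a warning) instead of crashing the ESI callback. A
shell sketch that builds the same paged query by hand, assuming giza is on the code path
and using an illustrative index, port, and keyword:

    Q0 = giza_query:new("xml", "ubuntu"),
    Q1 = giza_query:port(Q0, 9312),
    Q2 = giza_query:offset(Q1, 2 * 10),  %% page 2, 10 results per page
    Q3 = giza_query:limit(Q2, 10),
    case catch giza_request:send(Q3) of
        {ok, Ret} -> Ret;
        {'EXIT', _Reason} -> []
    end.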