%%%----------------------------------------------------------------------
%%% File    : mod_caps.erl
%%% Author  : Magnus Henoch <henoch@dtek.chalmers.se>
%%% Purpose : Request and cache Entity Capabilities (XEP-0115)
%%% Created : 7 Oct 2006 by Magnus Henoch <henoch@dtek.chalmers.se>
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2009   ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
%%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
%%% 02111-1307 USA
%%%
%%% 2009, improvements from ProcessOne to support correct PEP handling
%%% through s2s, use less memory, and speed up global caps handling
%%%----------------------------------------------------------------------

-module(mod_caps).
-author('henoch@dtek.chalmers.se').

-behaviour(gen_server).
-behaviour(gen_mod).

-export([read_caps/1,
         get_caps/1,
         note_caps/3,
         wait_caps/2,
         clear_caps/1,
         get_features/2,
         get_user_resources/2,
         handle_disco_response/3]).

%% gen_mod callbacks
-export([start/2, start_link/2,
         stop/1]).

%% gen_server callbacks
-export([init/1,
         handle_info/2,
         handle_call/3,
         handle_cast/2,
         terminate/2,
         code_change/3
        ]).

-include_lib("exmpp/include/exmpp.hrl").

%% hook handlers
-export([receive_packet/3,
         receive_packet/4,
         presence_probe/3,
         remove_connection/3]).

-include("ejabberd.hrl").

-define(PROCNAME, ejabberd_mod_caps).
-define(DICT, dict).
-define(CAPS_QUERY_TIMEOUT, 60000). % 1 minute without an answer: consider that the client will never answer

-record(caps, {node, version, exts}).
-record(caps_features, {node_pair, features = []}).
-record(user_caps, {jid, caps}).
-record(user_caps_resources, {uid, resource}).
-record(state, {host,
                disco_requests = ?DICT:new(),
                feature_queries = []}).

%% read_caps takes a list of XML elements (the child elements of a
%% <presence/> stanza) and returns an opaque value representing the
%% Entity Capabilities contained therein, or the atom nothing if no
%% capabilities are advertised.
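%%
%% Illustrative example (the element and values below are made up, not taken
%% from any particular client): a presence carrying
%%   <c xmlns='http://jabber.org/protocol/caps'
%%      node='http://psi-im.org/caps' ver='0.11' ext='cs ep-notify'/>
%% makes read_caps/1 return roughly
%%   #caps{node = "http://psi-im.org/caps", version = "0.11",
%%         exts = ["cs", "ep-notify"]}
%% while a child list without such an element yields the atom nothing.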
read_caps(Els) ->
    read_caps(Els, nothing).

read_caps([#xmlel{ns = ?NS_CAPS, name = 'c'} = El | Tail], _Result) ->
    Node = exmpp_xml:get_attribute_as_list(El, 'node', ""),
    Version = exmpp_xml:get_attribute_as_list(El, 'ver', ""),
    Exts = string:tokens(exmpp_xml:get_attribute_as_list(El, 'ext', ""), " "),
    read_caps(Tail, #caps{node = Node, version = Version, exts = Exts});
read_caps([#xmlel{ns = ?NS_MUC_USER, name = 'x'} | _Tail], _Result) ->
    nothing;
read_caps([_ | Tail], Result) ->
    read_caps(Tail, Result);
read_caps([], Result) ->
    Result.

%% get_caps reads user caps from the database.
%% Here we handle a simple retry loop, to avoid a race condition when
%% asking for caps before note_caps has been called. The timeout is set
%% to 10s (5 retries, 2s apart). This is to be improved, but without
%% hurting performance. If we did not get the user presence within 10s
%% of the presence_probe, we assume the user has no caps.
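%%
%% Usage sketch (the JID parts are hypothetical): a PEP-aware caller can do
%%   Caps = mod_caps:get_caps({<<"alice">>, <<"example.org">>, <<"laptop">>})
%% and gets back either the stored #caps{} record (fields turned into
%% binaries by caps_to_binary/1) or the atom nothing.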
get_caps(LJID) ->
    get_caps(LJID, 5).

get_caps(_, 0) ->
    nothing;
get_caps({U, S, R}, Retry) ->
    BJID = exmpp_jid:to_binary(U, S, R),
    case catch mnesia:dirty_read({user_caps, BJID}) of
        [#user_caps{caps = waiting}] ->
            timer:sleep(2000),
            get_caps({U, S, R}, Retry - 1);
        [#user_caps{caps = Caps}] ->
            Caps;
        _ ->
            nothing
    end.

%% clear_caps removes user caps from the database
clear_caps(JID) ->
    R = exmpp_jid:prep_resource(JID),
    BJID = exmpp_jid:to_binary(JID),
    BUID = exmpp_jid:bare_to_binary(JID),
    catch mnesia:dirty_delete({user_caps, BJID}),
    catch mnesia:dirty_delete_object(#user_caps_resources{uid = BUID, resource = list_to_binary(R)}),
    ok.

%% Return the known resources of a user
get_user_resources(U, S) ->
    BUID = exmpp_jid:bare_to_binary(U, S),
    case catch mnesia:dirty_read({user_caps_resources, BUID}) of
        {'EXIT', _} ->
            [];
        Resources ->
            lists:map(fun(#user_caps_resources{resource = R}) -> binary_to_list(R) end, Resources)
    end.

%% note_caps should be called to make the module request disco
%% information. Host is the host that asks, From is the full JID that
%% sent the caps packet, and Caps is what read_caps returned.
note_caps(Host, From, Caps) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:cast(Proc, {note_caps, From, Caps}).

%% wait_caps should be called just before note_caps.
%% It makes get_caps block (retry) for code using presence_probe that
%% may run before we get any chance to call note_caps.
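%%
%% Typical flow as wired up by the hooks below (a sketch, not an API
%% contract): presence_probe/3 first calls wait_caps(ServerHost, From),
%% which marks the contact as waiting; when the presence stanza arrives,
%% receive_packet/3 calls note_caps(ServerHost, From, read_caps(Els)),
%% replacing the waiting marker with the real caps so that get_caps/1
%% stops retrying.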
wait_caps(Host, From) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:cast(Proc, {wait_caps, From}).

%% get_features returns a list of features implied by the given caps
%% record (as extracted by read_caps). It may block, and may signal a
%% timeout error.
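%%
%% Illustrative result (the feature URIs are only examples): for a
%% PEP-capable client, get_features(Host, Caps) might return
%%   ["http://jabber.org/protocol/disco#info",
%%    "http://jabber.org/protocol/tune+notify"]
%% i.e. the union of the features cached for the caps version node and for
%% each advertised ext.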
get_features(Host, Caps) ->
    case Caps of
        nothing ->
            [];
        #caps{} ->
            Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
            gen_server:call(Proc, {get_features, Caps})
    end.

start_link(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:start_link({local, Proc}, ?MODULE, [Host, Opts], []).

start(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    ChildSpec =
        {Proc,
         {?MODULE, start_link, [Host, Opts]},
         transient,
         1000,
         worker,
         [?MODULE]},
    supervisor:start_child(ejabberd_sup, ChildSpec).

stop(Host) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:call(Proc, stop).

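%% Configuration sketch (assumed ejabberd.cfg syntax on this branch, shown
%% only for illustration): gen_mod starts one such worker per virtual host
%% when the module is listed in the modules option, for example
%%   {modules, [{mod_caps, []}]}.
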
receive_packet(From, To, Packet) when ?IS_PRESENCE(Packet) ->
    case exmpp_presence:get_type(Packet) of
        'probe' ->
            ok;
        'error' ->
            ok;
        'invisible' ->
            ok;
        'subscribe' ->
            ok;
        'subscribed' ->
            ok;
        'unsubscribe' ->
            ok;
        'unsubscribed' ->
            ok;
        'unavailable' ->
            {_, S1, _} = jlib:short_prepd_jid(From),
            case jlib:short_prepd_jid(To) of
                {_, S1, _} -> ok;
                _ -> clear_caps(From)
            end;
        %% TODO: probe the client, and clean up only if there is no answer.
        %% Since the protocol does not allow inter-server communication that
        %% would let the remote server use its own caps cache to decide which
        %% users are to be notified, we must keep a cache of the online status
        %% of external contacts. This is absolutely not scalable, but we have
        %% no choice for now. We can only rely on unavailable presence: if a
        %% remote user simply removes a local user from its roster, that user
        %% is considered offline and no longer receives local PEP events until
        %% logging in again.
        %% This is tracked in EJAB-943
        _ ->
            ServerString = exmpp_jid:prep_domain_as_list(To),
            Els = Packet#xmlel.children,
            note_caps(ServerString, From, read_caps(Els))
    end;
receive_packet(_, _, _) ->
    ok.

receive_packet(_JID, From, To, Packet) ->
    receive_packet(From, To, Packet).

presence_probe(From, To, _) ->
    ServerString = exmpp_jid:prep_domain_as_list(To),
    wait_caps(ServerString, From).

remove_connection(_SID, JID, _Info) ->
    clear_caps(JID).

caps_to_binary(#caps{node = Node, version = Version, exts = Exts}) ->
    BExts = [list_to_binary(Ext) || Ext <- Exts],
    #caps{node = list_to_binary(Node), version = list_to_binary(Version), exts = BExts}.

node_to_binary(Node, SubNode) ->
    {list_to_binary(Node), list_to_binary(SubNode)}.

features_to_binary(L) -> [list_to_binary(I) || I <- L].
binary_to_features(L) -> [binary_to_list(I) || I <- L].

%%====================================================================
%% gen_server callbacks
%%====================================================================

init([Host, _Opts]) ->
    mnesia:create_table(caps_features,
                        [{disc_copies, [node()]},
                         {attributes, record_info(fields, caps_features)}]),
    mnesia:create_table(user_caps,
                        [{ram_copies, [node()]},
                         {attributes, record_info(fields, user_caps)}]),
    mnesia:create_table(user_caps_resources,
                        [{ram_copies, [node()]},
                         {type, bag},
                         {attributes, record_info(fields, user_caps_resources)}]),
    mnesia:delete_table(user_caps_default),
    mnesia:clear_table(user_caps), % clean up in case the table was explicitly set to disc_copies
    mnesia:clear_table(user_caps_resources), % clean up in case the table was explicitly set to disc_copies
    ejabberd_hooks:add(user_receive_packet, Host, ?MODULE, receive_packet, 30),
    ejabberd_hooks:add(s2s_receive_packet, Host, ?MODULE, receive_packet, 30),
    ejabberd_hooks:add(presence_probe_hook, Host, ?MODULE, presence_probe, 20),
    ejabberd_hooks:add(sm_remove_connection_hook, Host, ?MODULE, remove_connection, 20),
    {ok, #state{host = Host}}.

maybe_get_features(#caps{node = Node, version = Version, exts = Exts}) ->
    SubNodes = [Version | Exts],
    %% Make sure that we have all nodes we need to know.
    %% If a single one is missing, we wait for more disco
    %% responses.
    case lists:foldl(fun(SubNode, Acc) ->
                             case Acc of
                                 fail -> fail;
                                 _ ->
                                     case mnesia:dirty_read({caps_features, {Node, SubNode}}) of
                                         [] -> fail;
                                         [#caps_features{features = Features}] -> Features ++ Acc %% TODO binary
                                     end
                             end
                     end, [], SubNodes) of
        fail -> wait;
        Features -> {ok, Features}
    end.

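%% For illustration (hypothetical values): a caps record such as
%%   #caps{node = "N", version = "V", exts = ["e1", "e2"]}
%% makes the fold in maybe_get_features/1 look up the node/subnode pairs
%% {"N", "V"}, {"N", "e1"} and {"N", "e2"} in the caps_features table; if
%% every pair is present, the cached feature lists are concatenated into
%% {ok, Features}, otherwise the caller is told to wait for more disco
%% responses.
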
timestamp() ->
    {MegaSecs, Secs, _MicroSecs} = now(),
    MegaSecs * 1000000 + Secs.

handle_call({get_features, Caps}, From, State) ->
    case maybe_get_features(Caps) of
        {ok, Features} ->
            {reply, binary_to_features(Features), State};
        wait ->
            gen_server:cast(self(), visit_feature_queries),
            Timeout = timestamp() + 10,
            FeatureQueries = State#state.feature_queries,
            NewFeatureQueries = [{From, Caps, Timeout} | FeatureQueries],
            NewState = State#state{feature_queries = NewFeatureQueries},
            {noreply, NewState}
    end;
handle_call(stop, _From, State) ->
    {stop, normal, ok, State}.

handle_cast({note_caps, From, nothing}, State) ->
    BJID = exmpp_jid:to_binary(From),
    catch mnesia:dirty_delete({user_caps, BJID}),
    {noreply, State};
handle_cast({note_caps, From,
             #caps{node = Node, version = Version, exts = Exts} = Caps},
            #state{host = Host, disco_requests = Requests} = State) ->
    %% XXX: this leads to race conditions where ejabberd will send
    %% lots of caps disco requests.
    %#jid{node = U, domain = S, resource = R} = From,
    U = exmpp_jid:prep_node(From),
    S = exmpp_jid:prep_domain(From),
    R = exmpp_jid:resource(From),
    BJID = exmpp_jid:to_binary(From),
    mnesia:transaction(fun() ->
        mnesia:dirty_write(#user_caps{jid = BJID, caps = caps_to_binary(Caps)}),
        case ejabberd_sm:get_user_resources(U, S) of
            [] ->
                % only store resources of caps-aware external contacts
                BUID = exmpp_jid:bare_to_binary(From),
                mnesia:dirty_write(#user_caps_resources{uid = BUID, resource = list_to_binary(R)});
            _ ->
                ok
        end
    end),
    %% Now, find which of these are not already in the database.
    SubNodes = [Version | Exts],
    case lists:foldl(fun(SubNode, Acc) ->
                             case mnesia:dirty_read({caps_features, {Node, SubNode}}) of
                                 [] ->
                                     [SubNode | Acc];
                                 _ ->
                                     Acc
                             end
                     end, [], SubNodes) of
        [] ->
            {noreply, State};
        Missing ->
            %% For each unknown caps "subnode", we send a disco request.
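            %% Illustrative wire format (JIDs and id are made-up examples):
            %% for Node "http://psi-im.org/caps" and SubNode "ep-notify",
            %% the stanza built below is routed from the server host to the
            %% contact roughly as
            %%   <iq from='example.org' to='alice@example.org/laptop'
            %%       type='get' id='...'>
            %%     <query xmlns='http://jabber.org/protocol/disco#info'
            %%            node='http://psi-im.org/caps#ep-notify'/>
            %%   </iq>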
            NewRequests = lists:foldl(
                            fun(SubNode, Dict) ->
                                    ID = randoms:get_string(),
                                    Query = exmpp_xml:set_attribute(
                                              #xmlel{ns = ?NS_DISCO_INFO, name = 'query'},
                                              'node', lists:concat([Node, "#", SubNode])),
                                    Stanza = exmpp_iq:get(?NS_JABBER_CLIENT, Query, ID),
                                    ejabberd_local:register_iq_response_handler(
                                      list_to_binary(Host), ID, ?MODULE, handle_disco_response),
                                    ejabberd_router:route(exmpp_jid:make(Host), From, Stanza),
                                    timer:send_after(?CAPS_QUERY_TIMEOUT, self(), {disco_timeout, ID, BJID}),
                                    ?DICT:store(ID, node_to_binary(Node, SubNode), Dict)
                            end, Requests, Missing),
            {noreply, State#state{disco_requests = NewRequests}}
    end;
handle_cast({wait_caps, From}, State) ->
    BJID = exmpp_jid:to_binary(From),
    mnesia:dirty_write(#user_caps{jid = BJID, caps = waiting}),
    {noreply, State};
handle_cast({disco_response, From, _To, #iq{id = ID, type = Type, payload = Payload}},
            #state{disco_requests = Requests} = State) ->
    case {Type, Payload} of
        {result, #xmlel{name = 'query', children = Els}} ->
            case ?DICT:find(ID, Requests) of
                {ok, BinaryNode} ->
                    Features =
                        lists:flatmap(fun(#xmlel{name = 'feature'} = F) ->
                                              [exmpp_xml:get_attribute_as_list(F, 'var', "")];
                                         (_) ->
                                              []
                                      end, Els),
                    mnesia:dirty_write(#caps_features{node_pair = BinaryNode, features = features_to_binary(Features)}),
                    gen_server:cast(self(), visit_feature_queries);
                error ->
                    ?DEBUG("ID '~s' matches no query", [ID])
            end;
        {error, _} ->
            %% XXX: if we get an error, we cache an empty feature list so we
            %% do not keep probing the client.
            case ?DICT:find(ID, Requests) of
                {ok, BinaryNode} ->
                    mnesia:dirty_write(#caps_features{node_pair = BinaryNode}),
                    gen_server:cast(self(), visit_feature_queries);
                error ->
                    ?DEBUG("ID '~s' matches no query", [ID])
            end;
            %gen_server:cast(self(), visit_feature_queries),
            %?DEBUG("Error IQ response from ~s:~n~p", [exmpp_jid:to_list(From), SubEls]);
        {result, Payload} ->
            ?DEBUG("Invalid IQ contents from ~s:~n~p", [exmpp_jid:to_binary(From), Payload]);
        _ ->
            %% Can't do anything about errors
            ok
    end,
    NewRequests = ?DICT:erase(ID, Requests),
    {noreply, State#state{disco_requests = NewRequests}};
handle_cast({disco_timeout, ID, BJID}, #state{host = Host, disco_requests = Requests} = State) ->
    %% Stop waiting for a response to this IQ: the client will most likely
    %% never answer.
    NewRequests = case ?DICT:is_key(ID, Requests) of
                      true ->
                          catch mnesia:dirty_delete({user_caps, BJID}),
                          ejabberd_local:unregister_iq_response_handler(list_to_binary(Host), ID),
                          ?DICT:erase(ID, Requests);
                      false ->
                          Requests
                  end,
    {noreply, State#state{disco_requests = NewRequests}};
handle_cast(visit_feature_queries, #state{feature_queries = FeatureQueries} = State) ->
    Timestamp = timestamp(),
    NewFeatureQueries =
        lists:foldl(fun({From, Caps, Timeout}, Acc) ->
                            case maybe_get_features(Caps) of
                                wait when Timeout > Timestamp -> [{From, Caps, Timeout} | Acc];
                                wait -> Acc;
                                {ok, Features} ->
                                    gen_server:reply(From, Features),
                                    Acc
                            end
                    end, [], FeatureQueries),
    {noreply, State#state{feature_queries = NewFeatureQueries}}.

handle_disco_response(From, To, IQ_Rec) ->
    Host = exmpp_jid:prep_domain_as_list(To),
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:cast(Proc, {disco_response, From, To, IQ_Rec}).

handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, State) ->
    Host = State#state.host,
    ejabberd_hooks:delete(user_receive_packet, Host, ?MODULE, receive_packet, 30),
    ejabberd_hooks:delete(s2s_receive_packet, Host, ?MODULE, receive_packet, 30),
    ejabberd_hooks:delete(presence_probe_hook, Host, ?MODULE, presence_probe, 20),
    ejabberd_hooks:delete(sm_remove_connection_hook, Host, ?MODULE, remove_connection, 20),
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.