%%%----------------------------------------------------------------------
%%% File    : mod_caps.erl
%%% Author  : Magnus Henoch <henoch@dtek.chalmers.se>
%%% Purpose : Request and cache Entity Capabilities (XEP-0115)
%%% Created : 7 Oct 2006 by Magnus Henoch <henoch@dtek.chalmers.se>
%%%
%%%
%%% ejabberd, Copyright (C) 2002-2009 ProcessOne
%%%
%%% This program is free software; you can redistribute it and/or
%%% modify it under the terms of the GNU General Public License as
%%% published by the Free Software Foundation; either version 2 of the
%%% License, or (at your option) any later version.
%%%
%%% This program is distributed in the hope that it will be useful,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
%%% General Public License for more details.
%%%
%%% You should have received a copy of the GNU General Public License
%%% along with this program; if not, write to the Free Software
%%% Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
%%% 02111-1307 USA
%%%
%%%----------------------------------------------------------------------

-module(mod_caps).
-author('henoch@dtek.chalmers.se').

-behaviour(gen_server).
-behaviour(gen_mod).

-export([read_caps/1,
         get_caps/1,
         note_caps/3,
         clear_caps/1,
         get_features/2,
         get_user_resource/2,
         handle_disco_response/3]).

%% gen_mod callbacks
-export([start/2, start_link/2,
         stop/1]).

%% gen_server callbacks
-export([init/1,
         handle_info/2,
         handle_call/3,
         handle_cast/2,
         terminate/2,
         code_change/3
        ]).

-include_lib("exmpp/include/exmpp.hrl").

-include("ejabberd.hrl").

-define(PROCNAME, ejabberd_mod_caps).
-define(DICT, dict).
-define(CAPS_QUERY_TIMEOUT, 60000). % one minute without an answer; assume the client will never reply

-record(caps, {node, version, exts}).
-record(caps_features, {node_pair, features}).
-record(user_caps, {jid, caps}).
-record(user_caps_default, {uid, resource}).
-record(state, {host,
                disco_requests = ?DICT:new(),
                feature_queries = []}).

%% read_caps takes a list of XML elements (the child elements of a
%% <presence/> stanza) and returns an opaque value representing the
%% Entity Capabilities contained therein, or the atom nothing if no
%% capabilities are advertised.
read_caps(Els) ->
    read_caps(Els, nothing).

read_caps([#xmlel{ns = ?NS_CAPS, name = 'c'} = El | Tail], _Result) ->
    Node = exmpp_xml:get_attribute_as_list(El, 'node', ""),
    Version = exmpp_xml:get_attribute_as_list(El, 'ver', ""),
    Exts = string:tokens(exmpp_xml:get_attribute_as_list(El, 'ext', ""), " "),
    read_caps(Tail, #caps{node = Node, version = Version, exts = Exts});
read_caps([#xmlel{ns = ?NS_MUC_USER, name = 'x'} | _Tail], _Result) ->
    nothing;
read_caps([_ | Tail], Result) ->
    read_caps(Tail, Result);
read_caps([], Result) ->
    Result.
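
%% Illustrative sketch (the node/ver/ext values below are made up):
%% for a presence child such as
%%   <c xmlns='http://jabber.org/protocol/caps'
%%      node='http://psi-im.org/caps' ver='0.11' ext='cs ep-notify'/>
%% read_caps/1 would return
%%   #caps{node = "http://psi-im.org/caps", version = "0.11",
%%         exts = ["cs", "ep-notify"]}.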

%% get_caps reads the user's caps from the database
get_caps({U, S, R}) ->
    BJID = exmpp_jid:jid_to_binary(U, S, R),
    case catch mnesia:dirty_read({user_caps, BJID}) of
        [#user_caps{caps = Caps}] ->
            Caps;
        _ ->
            nothing
    end.
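
%% Usage sketch (illustrative JID; the accepted argument types are
%% whatever exmpp_jid:jid_to_binary/3 accepts):
%%   case mod_caps:get_caps({"juliet", "example.org", "balcony"}) of
%%       nothing     -> no_caps_cached;
%%       #caps{} = C -> C
%%   end.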

%% clear_caps removes the user's caps from the database
clear_caps(JID) ->
    BJID = exmpp_jid:jid_to_binary(JID),
    BUID = exmpp_jid:bare_jid_to_binary(JID),
    catch mnesia:dirty_delete({user_caps, BJID}),
    case catch mnesia:dirty_read({user_caps_default, BUID}) of
        [#user_caps_default{resource = _R}] ->
            catch mnesia:dirty_delete({user_caps_default, BUID});
        _ ->
            ok
    end.

%% get_user_resource returns the default resource stored for a user
get_user_resource(U, S) ->
    BUID = exmpp_jid:bare_jid_to_binary(U, S),
    case catch mnesia:dirty_read({user_caps_default, BUID}) of
        [#user_caps_default{resource = R}] ->
            R;
        _ ->
            []
    end.

%% note_caps should be called to make the module request disco
%% information. Host is the host that asks, From is the full JID that
%% sent the caps packet, and Caps is what read_caps returned.
note_caps(Host, From, Caps) ->
    case Caps of
        nothing ->
            ok;
        _ ->
            Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
            gen_server:cast(Proc, {note_caps, From, Caps})
    end.

%% get_features returns a list of features implied by the given caps
%% record (as extracted by read_caps). It may block, and may signal a
%% timeout error.
get_features(Host, Caps) ->
    case Caps of
        nothing ->
            [];
        #caps{} ->
            Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
            gen_server:call(Proc, {get_features, Caps})
    end.
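
%% Putting the pieces together, a sketch of how a caller (for example
%% a presence handler) might combine these exports; Els and the other
%% variable names are illustrative:
%%   Caps = mod_caps:read_caps(Els),          %% Els: children of <presence/>
%%   mod_caps:note_caps(Host, From, Caps),    %% triggers disco#info queries
%%   Features = mod_caps:get_features(Host, Caps).  %% may block until cached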

start_link(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:start_link({local, Proc}, ?MODULE, [Host, Opts], []).

start(Host, Opts) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    ChildSpec =
        {Proc,
         {?MODULE, start_link, [Host, Opts]},
         transient,
         1000,
         worker,
         [?MODULE]},
    supervisor:start_child(ejabberd_sup, ChildSpec).

stop(Host) ->
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:call(Proc, stop).

%%====================================================================
%% gen_server callbacks
%%====================================================================

init([Host, _Opts]) ->
    mnesia:create_table(caps_features,
                        [{ram_copies, [node()]},
                         {attributes, record_info(fields, caps_features)}]),
    mnesia:create_table(user_caps,
                        [{disc_copies, [node()]},
                         {attributes, record_info(fields, user_caps)}]),
    mnesia:create_table(user_caps_default,
                        [{disc_copies, [node()]},
                         {attributes, record_info(fields, user_caps_default)}]),
    {ok, #state{host = Host}}.

maybe_get_features(#caps{node = Node, version = Version, exts = Exts}) ->
    SubNodes = [Version | Exts],
    F = fun() ->
                %% Make sure that we have all nodes we need to know.
                %% If a single one is missing, we wait for more disco
                %% responses.
                lists:foldl(fun(SubNode, Acc) ->
                                    case Acc of
                                        fail -> fail;
                                        _ ->
                                            case mnesia:read({caps_features, {Node, SubNode}}) of
                                                [] -> fail;
                                                [#caps_features{features = Features}] -> Features ++ Acc
                                            end
                                    end
                            end, [], SubNodes)
        end,
    case mnesia:transaction(F) of
        {atomic, fail} ->
            wait;
        {atomic, Features} ->
            {ok, Features}
    end.
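
%% For example (illustrative values), a caps record such as
%%   #caps{node = "http://psi-im.org/caps", version = "0.11", exts = ["cs"]}
%% leads to lookups of the node_pair keys
%%   {"http://psi-im.org/caps", "0.11"} and {"http://psi-im.org/caps", "cs"};
%% if any of them is missing from caps_features, the caller has to wait.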

timestamp() ->
    {MegaSecs, Secs, _MicroSecs} = now(),
    MegaSecs * 1000000 + Secs.

handle_call({get_features, Caps}, From, State) ->
    case maybe_get_features(Caps) of
        {ok, Features} ->
            {reply, Features, State};
        wait ->
            gen_server:cast(self(), visit_feature_queries),
            Timeout = timestamp() + 10,
            FeatureQueries = State#state.feature_queries,
            NewFeatureQueries = [{From, Caps, Timeout} | FeatureQueries],
            NewState = State#state{feature_queries = NewFeatureQueries},
            {noreply, NewState}
    end;

handle_call(stop, _From, State) ->
    {stop, normal, ok, State}.

handle_cast({note_caps, From,
             #caps{node = Node, version = Version, exts = Exts} = Caps},
            #state{host = Host, disco_requests = Requests} = State) ->
    %% XXX: this leads to race conditions where ejabberd will send
    %% lots of caps disco requests.
    %#jid{node = U, domain = S, resource = R} = From,
    U = exmpp_jid:lnode(From),
    S = exmpp_jid:ldomain(From),
    R = exmpp_jid:resource(From),
    BJID = exmpp_jid:jid_to_binary(From),
    mnesia:dirty_write(#user_caps{jid = BJID, caps = Caps}),
    case ejabberd_sm:get_user_resources(U, S) of
        [] ->
            ok;
        _ ->
            % only store default resource of external contacts
            BUID = exmpp_jid:bare_jid_to_binary(From),
            mnesia:dirty_write(#user_caps_default{uid = BUID, resource = R})
    end,
    SubNodes = [Version | Exts],
    %% Now, find which of these are not already in the database.
    Fun = fun() ->
                  lists:foldl(fun(SubNode, Acc) ->
                                      case mnesia:read({caps_features, {Node, SubNode}}) of
                                          [] ->
                                              [SubNode | Acc];
                                          _ ->
                                              Acc
                                      end
                              end, [], SubNodes)
          end,
    case mnesia:transaction(Fun) of
        {atomic, Missing} ->
            %% For each unknown caps "subnode", we send a disco request.
            NewRequests = lists:foldl(
                            fun(SubNode, Dict) ->
                                    ID = randoms:get_string(),
                                    Query = exmpp_xml:set_attribute(
                                              #xmlel{ns = ?NS_DISCO_INFO, name = 'query'},
                                              'node', lists:concat([Node, "#", SubNode])),
                                    Stanza = exmpp_iq:get(?NS_JABBER_CLIENT, Query, ID),
                                    ejabberd_local:register_iq_response_handler
                                      (list_to_binary(Host), ID, ?MODULE, handle_disco_response),
                                    ejabberd_router:route(exmpp_jid:make_jid(Host),
                                                          From, Stanza),
                                    timer:send_after(?CAPS_QUERY_TIMEOUT, self(), {disco_timeout, ID}),
                                    ?DICT:store(ID, {Node, SubNode}, Dict)
                            end, Requests, Missing),
            {noreply, State#state{disco_requests = NewRequests}};
        Error ->
            ?ERROR_MSG("Transaction failed: ~p", [Error]),
            {noreply, State}
    end;
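
%% The clause above sends, for every missing {Node, SubNode} pair, a
%% disco#info query roughly of this shape (attribute values are
%% illustrative placeholders):
%%   <iq type='get' from='Host' to='From' id='ID'>
%%     <query xmlns='http://jabber.org/protocol/disco#info'
%%            node='Node#SubNode'/>
%%   </iq>
%% and remembers ID in disco_requests so the answer (or a
%% ?CAPS_QUERY_TIMEOUT) can be matched back to the pair.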

handle_cast({disco_response, From, _To, #iq{id = ID, type = Type, payload = Payload}},
            #state{disco_requests = Requests} = State) ->
    case {Type, Payload} of
        {result, #xmlel{name = 'query', children = Els}} ->
            case ?DICT:find(ID, Requests) of
                {ok, {Node, SubNode}} ->
                    Features =
                        lists:flatmap(fun(#xmlel{name = 'feature'} = F) ->
                                              [exmpp_xml:get_attribute_as_list(F, 'var', "")];
                                         (_) ->
                                              []
                                      end, Els),
                    mnesia:transaction(
                      fun() ->
                              mnesia:write(#caps_features{node_pair = {Node, SubNode},
                                                          features = Features})
                      end),
                    gen_server:cast(self(), visit_feature_queries);
                error ->
                    ?ERROR_MSG("ID '~s' matches no query", [ID])
            end;
        {error, _} ->
            %% XXX: on error, cache an empty feature list so that we do
            %% not keep probing the client.
            case ?DICT:find(ID, Requests) of
                {ok, {Node, SubNode}} ->
                    Features = [],
                    mnesia:transaction(
                      fun() ->
                              mnesia:write(#caps_features{node_pair = {Node, SubNode},
                                                          features = Features})
                      end),
                    gen_server:cast(self(), visit_feature_queries);
                error ->
                    ?ERROR_MSG("ID '~s' matches no query", [ID])
            end;
            %gen_server:cast(self(), visit_feature_queries),
            %?DEBUG("Error IQ response from ~s:~n~p", [exmpp_jid:jid_to_list(From), SubEls]);
        {result, Payload} ->
            ?DEBUG("Invalid IQ contents from ~s:~n~p", [exmpp_jid:jid_to_list(From), Payload]);
        _ ->
            %% Can't do anything about errors
            ok
    end,
    NewRequests = ?DICT:erase(ID, Requests),
    {noreply, State#state{disco_requests = NewRequests}};

handle_cast({disco_timeout, ID}, #state{host = Host, disco_requests = Requests} = State) ->
    %% Stop waiting for a response to this IQ; the client will most
    %% likely never answer.
    NewRequests = case ?DICT:is_key(ID, Requests) of
                      true ->
                          ejabberd_local:unregister_iq_response_handler(list_to_binary(Host), ID),
                          ?DICT:erase(ID, Requests);
                      false ->
                          Requests
                  end,
    {noreply, State#state{disco_requests = NewRequests}};

handle_cast(visit_feature_queries, #state{feature_queries = FeatureQueries} = State) ->
    Timestamp = timestamp(),
    NewFeatureQueries =
        lists:foldl(fun({From, Caps, Timeout}, Acc) ->
                            case maybe_get_features(Caps) of
                                wait when Timeout > Timestamp -> [{From, Caps, Timeout} | Acc];
                                wait -> Acc;
                                {ok, Features} ->
                                    gen_server:reply(From, Features),
                                    Acc
                            end
                    end, [], FeatureQueries),
    {noreply, State#state{feature_queries = NewFeatureQueries}}.

handle_disco_response(From, To, IQ_Rec) ->
    Host = exmpp_jid:ldomain_as_list(To),
    Proc = gen_mod:get_module_proc(Host, ?PROCNAME),
    gen_server:cast(Proc, {disco_response, From, To, IQ_Rec}).

handle_info(_Info, State) ->
    {noreply, State}.

terminate(_Reason, _State) ->
    ok.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.