Add XEP-0115 plugin.

Finally
Lance Stout 2011-12-30 21:45:25 -05:00
parent 6722b0224a
commit 8a29ec67ac
4 changed files with 437 additions and 0 deletions

sleekxmpp/plugins/xep_0115/__init__.py

@@ -0,0 +1,11 @@
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.plugins.xep_0115.stanza import Capabilities
from sleekxmpp.plugins.xep_0115.static import StaticCaps
from sleekxmpp.plugins.xep_0115.caps import xep_0115
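
The package only exposes the stanza definition, the static handlers, and the plugin class; enabling the plugin goes through the normal plugin registration machinery. A minimal usage sketch follows, with placeholder credentials; the config keys match the ones read by plugin_init in caps.py below:

import sleekxmpp

# Placeholder account credentials, for illustration only.
xmpp = sleekxmpp.ClientXMPP('user@example.com', 'secret')

xmpp.register_plugin('xep_0030')                    # Service Discovery, used in post_init
xmpp.register_plugin('xep_0115', {'hash': 'sha-1',  # see self.hashes in plugin_init
                                  'broadcast': True})

if xmpp.connect():
    xmpp.process(threaded=False)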

sleekxmpp/plugins/xep_0115/caps.py

@@ -0,0 +1,260 @@
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import logging
import hashlib
import base64
import sleekxmpp
from sleekxmpp import Presence, Iq
from sleekxmpp.xmlstream import register_stanza_plugin
from sleekxmpp.xmlstream.handler import Callback
from sleekxmpp.xmlstream.matcher import StanzaPath
from sleekxmpp.exceptions import XMPPError, IqError, IqTimeout
from sleekxmpp.plugins.base import base_plugin
from sleekxmpp.plugins.xep_0115 import stanza, StaticCaps
log = logging.getLogger(__name__)
class xep_0115(base_plugin):
"""
XEP-0115: Entity Capabilities
"""
def plugin_init(self):
self.xep = '0115'
self.description = 'Entity Capabilities'
self.stanza = stanza
self.hashes = {'sha-1': hashlib.sha1,
'md5': hashlib.md5}
self.hash = self.config.get('hash', 'sha-1')
self.caps_node = self.config.get('caps_node', None)
self.broadcast = self.config.get('broadcast', True)
if self.caps_node is None:
ver = sleekxmpp.__version__
self.caps_node = 'http://sleekxmpp.com/ver/%s' % ver
register_stanza_plugin(Presence, stanza.Capabilities)
self._disco_ops = ['cache_caps',
'get_caps',
'assign_verstring',
'get_verstring',
'supports',
'has_identity']
self.xmpp.register_handler(
Callback('Entity Capabilities',
StanzaPath('presence/caps'),
self._handle_caps))
self.xmpp.add_filter('out', self._filter_add_caps)
self.xmpp.add_event_handler('entity_caps', self._process_caps,
threaded=True)
def post_init(self):
base_plugin.post_init(self)
self.xmpp['xep_0030'].add_feature(stanza.Capabilities.namespace)
self.disco = self.xmpp['xep_0030']
self.static = StaticCaps(self.xmpp, self.disco.static)
for op in self._disco_ops:
self.disco._add_disco_op(op, getattr(self.static, op))
self._run_node_handler = self.disco._run_node_handler
self.disco.cache_caps = self.cache_caps
self.disco.update_caps = self.update_caps
self.disco.assign_verstring = self.assign_verstring
self.disco.get_verstring = self.get_verstring
def _filter_add_caps(self, stanza):
if isinstance(stanza, Presence) and self.broadcast:
ver = self.get_verstring(stanza['from'])
if ver:
stanza['caps']['node'] = self.caps_node
stanza['caps']['hash'] = self.hash
stanza['caps']['ver'] = ver
return stanza
def _handle_caps(self, presence):
if not self.xmpp.is_component:
if presence['from'] == self.xmpp.boundjid:
return
self.xmpp.event('entity_caps', presence)
def _process_caps(self, pres):
if not pres['caps']['hash']:
log.debug("Received unsupported legacy caps.")
self.xmpp.event('entity_caps_legacy', pres)
return
existing_verstring = self.get_verstring(pres['from'].full)
if str(existing_verstring) == str(pres['caps']['ver']):
return
if pres['caps']['hash'] not in self.hashes:
try:
log.debug("Unknown caps hash: %s", pres['caps']['hash'])
self.disco.get_info(jid=pres['from'])
return
except XMPPError:
return
log.debug("New caps verification string: %s", pres['caps']['ver'])
try:
caps = self.disco.get_info(
jid=pres['from'],
node='%s#%s' % (pres['caps']['node'],
pres['caps']['ver']))
if self._validate_caps(caps['disco_info'],
pres['caps']['hash'],
pres['caps']['ver']):
self.assign_verstring(pres['from'], pres['caps']['ver'])
except XMPPError:
log.debug("Could not retrieve disco#info results for caps")
def _validate_caps(self, caps, hash, check_verstring):
# Check Identities
full_ids = caps.get_identities(dedupe=False)
deduped_ids = caps.get_identities()
if len(full_ids) != len(deduped_ids):
log.debug("Duplicate disco identities found, invalid for caps")
return False
# Check Features
full_features = caps.get_features(dedupe=False)
deduped_features = caps.get_features()
if len(full_features) != len(deduped_features):
log.debug("Duplicate disco features found, invalid for caps")
return False
# Check Forms
form_types = []
deduped_form_types = set()
for stanza in caps['substanzas']:
if isinstance(stanza, self.xmpp['xep_0004'].stanza.Form):
if 'FORM_TYPE' in stanza['fields']:
f_type = tuple(stanza['fields']['FORM_TYPE']['value'])
form_types.append(f_type)
deduped_form_types.add(f_type)
if len(form_types) != len(deduped_form_types):
log.debug("Duplicated FORM_TYPE values, invalid for caps")
return False
if len(f_type) > 1:
deduped_type = set(f_type)
if len(f_type) != len(deduped_type):
log.debug("Extra FORM_TYPE data, invalid for caps")
return False
if stanza['fields']['FORM_TYPE']['type'] != 'hidden':
log.debug("Field FORM_TYPE type not 'hidden', ignoring form for caps")
caps.xml.remove(stanza.xml)
else:
log.debug("No FORM_TYPE found, ignoring form for caps")
caps.xml.remove(stanza.xml)
verstring = self.generate_verstring(caps, hash)
if verstring != check_verstring:
log.debug("Verification strings do not match: %s, %s" % (
verstring, check_verstring))
return False
self.cache_caps(verstring, caps)
return True
def generate_verstring(self, info, hash):
hash = self.hashes.get(hash, None)
if hash is None:
return None
S = ''
# Convert None to '' in the identities
def clean_identity(id):
return map(lambda i: i or '', id)
identities = map(clean_identity, info['identities'])
identities = sorted(('/'.join(i) for i in identities))
features = sorted(info['features'])
S += '<'.join(identities) + '<'
S += '<'.join(features) + '<'
form_types = {}
for stanza in info['substanzas']:
if isinstance(stanza, self.xmpp['xep_0004'].stanza.Form):
if 'FORM_TYPE' in stanza['fields']:
f_type = stanza['values']['FORM_TYPE']
if len(f_type):
f_type = f_type[0]
if f_type not in form_types:
form_types[f_type] = []
form_types[f_type].append(stanza)
sorted_forms = sorted(form_types.keys())
for f_type in sorted_forms:
for form in form_types[f_type]:
S += '%s<' % f_type
fields = sorted(form['fields'].keys())
fields.remove('FORM_TYPE')
for field in fields:
S += '%s<' % field
vals = form['fields'][field].get_value(convert=False)
if vals is None:
S += '<'
else:
if not isinstance(vals, list):
vals = [vals]
S += '<'.join(sorted(vals)) + '<'
binary = hash(S.encode('utf8')).digest()
return base64.b64encode(binary).decode('utf-8')  # str, to match the 'ver' values compared and cached elsewhere
def update_caps(self, jid=None, node=None):
info = self.disco.get_info(jid, node, local=True)
if isinstance(info, Iq):
info = info['disco_info']
ver = self.generate_verstring(info, self.hash)
self.cache_caps(ver, info)
self.assign_verstring(jid, ver)
def get_verstring(self, jid=None):
return self._run_node_handler('get_verstring', jid)
def assign_verstring(self, jid=None, verstring=None):
if jid in (None, ''):
jid = self.xmpp.boundjid.full
return self._run_node_handler('assign_verstring', jid,
data={'verstring': verstring})
def cache_caps(self, verstring=None, info=None):
data = {'verstring': verstring, 'info': info}
return self._run_node_handler('cache_caps', None, None, data=data)
def get_caps(self, jid=None, verstring=None):
if verstring is None:
if jid is not None:
verstring = self.get_verstring(jid)
else:
return None
data = {'verstring': verstring}
return self._run_node_handler('get_caps', jid, None, None, data)
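
generate_verstring follows the hashing procedure from XEP-0115 section 5.1: sorted category/type/lang/name identities, then sorted features, then sorted data forms, each item terminated by '<'. As a standalone sanity check, the identity-and-feature portion can be reproduced directly; the input below is the simple generation example from XEP-0115, which the XEP lists as hashing to QgayPKawpkPSDYmwT/WM94uAlu0=:

import base64
import hashlib

# Disco#info data from XEP-0115's simple generation example.
identities = ['client/pc//Exodus 0.9.1']           # category/type/lang/name
features = sorted(['http://jabber.org/protocol/caps',
                   'http://jabber.org/protocol/disco#info',
                   'http://jabber.org/protocol/disco#items',
                   'http://jabber.org/protocol/muc'])

S = '<'.join(sorted(identities)) + '<'
S += '<'.join(features) + '<'

ver = base64.b64encode(hashlib.sha1(S.encode('utf8')).digest()).decode('utf-8')
print(ver)   # QgayPKawpkPSDYmwT/WM94uAlu0=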

sleekxmpp/plugins/xep_0115/stanza.py

@@ -0,0 +1,19 @@
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from __future__ import unicode_literals
from sleekxmpp.xmlstream import ElementBase, ET
class Capabilities(ElementBase):
namespace = 'http://jabber.org/protocol/caps'
name = 'c'
plugin_attrib = 'caps'
interfaces = set(('hash', 'node', 'ver', 'ext'))
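
The Capabilities element is attached to Presence stanzas by plugin_init in caps.py above; once registered, the caps interface reads and writes the <c/> child directly. A small sketch (the node and ver values are only illustrative, and the register_stanza_plugin call is repeated here just to keep the snippet self-contained):

from sleekxmpp import Presence
from sleekxmpp.xmlstream import register_stanza_plugin
from sleekxmpp.plugins.xep_0115.stanza import Capabilities

register_stanza_plugin(Presence, Capabilities)

pres = Presence()
pres['caps']['node'] = 'http://sleekxmpp.com/ver/1.0'   # illustrative node value
pres['caps']['hash'] = 'sha-1'
pres['caps']['ver'] = 'QgayPKawpkPSDYmwT/WM94uAlu0='
print(pres)
# Roughly: <presence><c xmlns="http://jabber.org/protocol/caps"
#          node="http://sleekxmpp.com/ver/1.0" hash="sha-1" ver="..." /></presence>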

sleekxmpp/plugins/xep_0115/static.py

@@ -0,0 +1,147 @@
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2011 Nathanael C. Fritz, Lance J.T. Stout
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import logging
import sleekxmpp
from sleekxmpp.xmlstream import JID
from sleekxmpp.exceptions import IqError, IqTimeout
from sleekxmpp.plugins.xep_0030 import StaticDisco
log = logging.getLogger(__name__)
class StaticCaps(object):
"""
Extend the default StaticDisco implementation to provide
support for entity capabilities: verification strings are
cached per JID and mapped to disco#info results, so feature
and identity checks can be answered without extra disco queries.
"""
def __init__(self, xmpp, static):
"""
Augment the default XEP-0030 static handler object.
Arguments:
static -- The default static XEP-0030 handler object.
"""
self.xmpp = xmpp
self.disco = self.xmpp['xep_0030']
self.caps = self.xmpp['xep_0115']
self.static = static
self.ver_cache = {}
self.jid_vers = {}
def supports(self, jid, node, ifrom, data):
"""
Check if a JID supports a given feature.
The data parameter may provide:
feature -- The feature to check for support.
local -- If true, then the query is for a JID/node
combination handled by this Sleek instance and
no stanzas need to be sent.
Otherwise, a disco stanza must be sent to the
remote JID to retrieve the info.
cached -- If true, then look for the disco info data from
the local cache system. If no results are found,
send the query as usual. The self.use_cache
setting must be set to true for this option to
be useful. If set to false, then the cache will
be skipped, even if a result has already been
cached. Defaults to true.
"""
feature = data.get('feature', None)
data = {'local': data.get('local', False),
'cached': data.get('cached', True)}
if not feature:
return False
if node in (None, ''):
info = self.caps.get_caps(jid)
if info and feature in info['features']:
return True
try:
info = self.disco.get_info(jid=jid, node=node,
ifrom=ifrom, **data)
info = self.disco._wrap(ifrom, jid, info, True)
return feature in info['disco_info']['features']
except IqError:
return False
except IqTimeout:
return None
def has_identity(self, jid, node, ifrom, data):
"""
Check if a JID has a given identity.
The data parameter may provide:
category -- The category of the identity to check.
itype -- The type of the identity to check.
lang -- The language of the identity to check.
local -- If true, then the query is for a JID/node
combination handled by this Sleek instance and
no stanzas need to be sent.
Otherwise, a disco stanza must be sent to the
remote JID to retrieve the info.
cached -- If true, then look for the disco info data from
the local cache system. If no results are found,
send the query as usual. The self.use_cache
setting must be set to true for this option to
be useful. If set to false, then the cache will
be skipped, even if a result has already been
cached. Defaults to true.
"""
identity = (data.get('category', None),
data.get('itype', None),
data.get('lang', None))
data = {'local': data.get('local', False),
'cached': data.get('cached', True)}
trunc = lambda i: (i[0], i[1], i[2])
if node in (None, ''):
info = self.caps.get_caps(jid)
if info and identity in map(trunc, info['identities']):
return True
try:
info = self.disco.get_info(jid=jid, node=node,
ifrom=ifrom, **data)
info = self.disco._wrap(ifrom, jid, info, True)
return identity in map(trunc, info['disco_info']['identities'])
except IqError:
return False
except IqTimeout:
return None
def cache_caps(self, jid, node, ifrom, data):
with self.static.lock:
verstring = data.get('verstring', None)
info = data.get('info', None)
if not verstring or not info:
return
self.ver_cache[verstring] = info
def assign_verstring(self, jid, node, ifrom, data):
with self.static.lock:
if isinstance(jid, JID):
jid = jid.full
self.jid_vers[jid] = data.get('verstring', None)
def get_verstring(self, jid, node, ifrom, data):
with self.static.lock:
return self.jid_vers.get(jid, None)
def get_caps(self, jid, node, ifrom, data):
with self.static.lock:
return self.ver_cache.get(data.get('verstring', None), None)
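
These node handlers back the cache_caps/get_caps/get_verstring/assign_verstring operations registered with XEP-0030 in post_init, so an application can answer feature questions from the caps cache without new disco#info round trips. A hedged sketch, assuming a connected client with both plugins loaded and a contact whose caps have already been processed; supports_muc and the example JID are purely illustrative:

def supports_muc(xmpp, jid):
    """Return True/False from cached caps, or None if nothing is cached yet."""
    info = xmpp['xep_0115'].get_caps(jid)   # cached DiscoInfo, looked up by verstring
    if info is None:
        return None
    return 'http://jabber.org/protocol/muc' in info['features']

# Usage (with the client from the registration sketch above); pass the full JID
# string, matching how assign_verstring stores its entries:
#     supports_muc(xmpp, 'contact@example.com/laptop')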