Search is not available for this dataset
text stringlengths 75 104k |
|---|
def export(self):
    """
    This method deactivates the security context for the calling process and returns an
    interprocess token which, when passed to :meth:`imprt` in another process, will re-activate
    the context in the second process. Only a single instantiation of a given context may be
    active at any one time; attempting to access this security context after calling
    :meth:`export` will fail. This method can only be used on a valid context where
    :attr:`is_transferable` is True.

    :returns: a token which represents this security context
    :rtype: bytes
    """
    # GSS_C_TRANS_FLAG is only set when the mechanism supports
    # gss_export_sec_context, so check it before calling into the C layer.
    if not (self.flags & C.GSS_C_TRANS_FLAG):
        raise GSSException("Context is not transferable.")
    if not self._ctx:
        raise GSSException("Can't export empty/invalid context.")
    minor_status = ffi.new('OM_uint32[1]')
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    # On success this deactivates the context and fills output_token_buffer
    # with the serialized representation.
    retval = C.gss_export_sec_context(
        minor_status,
        self._ctx,
        output_token_buffer
    )
    try:
        if GSS_ERROR(retval):
            if minor_status[0] and self.mech_type:
                raise _exception_for_status(retval, minor_status[0], self.mech_type)
            else:
                raise _exception_for_status(retval, minor_status[0])
        exported_token = _buf_to_str(output_token_buffer[0])
        # Set our context to a 'blank' context
        self._ctx = ffi.new('gss_ctx_id_t[1]')
        return exported_token
    finally:
        # The token buffer was allocated by the GSSAPI library, so it must be
        # released by it as well — even when an exception was raised above.
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
def imprt(import_token):
    """
    This is the corresponding method to :meth:`export`, used to import a saved context token
    from another process into this one and construct a :class:`Context` object from it.

    :param import_token: a token obtained from the :meth:`export` of another context
    :type import_token: bytes
    :returns: a Context object created from the imported token
    :rtype: :class:`Context`
    """
    minor_status = ffi.new('OM_uint32[1]')
    import_token_buffer = ffi.new('gss_buffer_desc[1]')
    import_token_buffer[0].length = len(import_token)
    # Keep a Python reference so the C string stays alive during the call.
    c_str_import_token = ffi.new('char[]', import_token)
    import_token_buffer[0].value = c_str_import_token
    new_context = ffi.new('gss_ctx_id_t[1]')
    retval = C.gss_import_sec_context(
        minor_status,
        import_token_buffer,
        new_context
    )
    try:
        if GSS_ERROR(retval):
            raise _exception_for_status(retval, minor_status[0])
        src_name = ffi.new('gss_name_t[1]')
        target_name = ffi.new('gss_name_t[1]')
        mech_type = ffi.new('gss_OID[1]')
        flags = ffi.new('OM_uint32[1]')
        locally_initiated = ffi.new('int[1]')
        established = ffi.new('int[1]')
        retval = C.gss_inquire_context(
            minor_status,
            new_context[0],
            src_name,
            target_name,
            ffi.NULL,  # lifetime_rec
            mech_type,
            flags,
            locally_initiated,
            established
        )
        # Wrap the names immediately so they are released by GC even if the
        # error check below raises.
        src_name = Name(src_name)
        target_name = Name(target_name)
        if GSS_ERROR(retval):
            raise _exception_for_status(retval, minor_status[0])
        mech = OID(mech_type[0][0]) if mech_type[0] else None
        # BUG FIX: the cffi array object itself is always truthy, so the old
        # `if locally_initiated:` test always took the initiator branch.
        # The int value it holds must be examined instead.
        if locally_initiated[0]:
            new_context_obj = InitContext(target_name, mech_type=mech)
        else:
            new_context_obj = AcceptContext()
        new_context_obj.peer_name = src_name
        new_context_obj.mech_type = mech
        new_context_obj.flags = flags[0]
        new_context_obj.established = bool(established[0])
        new_context_obj._ctx = ffi.gc(new_context, _release_gss_ctx_id_t)
        return new_context_obj
    except:
        # On any failure, delete the imported context so it is not leaked.
        if new_context[0]:
            C.gss_delete_sec_context(
                minor_status,
                new_context,
                ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER)
            )
        raise
def lifetime(self):
    """
    The lifetime of the context in seconds (only valid after :meth:`step` has been called). If
    the context does not have a time limit on its validity, this will be
    :const:`gssapi.C_INDEFINITE`
    """
    minor_status = ffi.new('OM_uint32[1]')
    lifetime_rec = ffi.new('OM_uint32[1]')
    # Only the lifetime output parameter is requested; all other outputs of
    # gss_inquire_context are suppressed with NULL.
    retval = C.gss_inquire_context(
        minor_status,
        self._ctx[0],
        ffi.NULL,  # src_name
        ffi.NULL,  # target_name
        lifetime_rec,
        ffi.NULL,  # mech_type
        ffi.NULL,  # ctx_flags
        ffi.NULL,  # locally_initiated
        ffi.NULL   # established
    )
    if GSS_ERROR(retval):
        if minor_status[0] and self.mech_type:
            raise _exception_for_status(retval, minor_status[0], self.mech_type)
        else:
            raise _exception_for_status(retval, minor_status[0])
    return lifetime_rec[0]
def delete(self):
    """
    Delete a security context. This method will delete the local data structures associated
    with the specified security context, and may return an output token, which when passed to
    :meth:`process_context_token` on the peer may instruct it to also delete its context.
    RFC 2744 recommends that GSSAPI mechanisms do not emit any output token when they're
    deleted, so this behaviour could be considered deprecated.

    After this method is called, this security context will become invalid and should not be
    used in any way.

    :returns: An output token if one was emitted by the GSSAPI mechanism, otherwise an empty
        bytestring.
    :rtype: bytes
    """
    if not self._ctx[0]:
        raise GSSException("Can't delete invalid context")
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    minor_status = ffi.new('OM_uint32[1]')
    retval = C.gss_delete_sec_context(
        minor_status,
        self._ctx,
        output_token_buffer
    )
    # Regardless of the outcome, reset this object to a blank, unestablished
    # context before examining the return value.
    self._ctx = ffi.new('gss_ctx_id_t[1]')
    self._reset_flags()
    try:
        if GSS_ERROR(retval):
            if minor_status[0] and self.mech_type:
                raise _exception_for_status(retval, minor_status[0], self.mech_type)
            else:
                raise _exception_for_status(retval, minor_status[0])
        return _buf_to_str(output_token_buffer[0])
    finally:
        # Release the library-allocated token buffer even on error.
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
def step(self, input_token=None):
    """Performs a step to establish the context as an initiator.

    This method should be called in a loop and fed input tokens from the acceptor, and its
    output tokens should be sent to the acceptor, until this context's :attr:`established`
    attribute is True.

    :param input_token: The input token from the acceptor (omit this param or pass None on
        the first call).
    :type input_token: bytes
    :returns: either a byte string with the next token to send to the acceptor,
        or None if there is no further token to send to the acceptor.
    :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context.
    """
    minor_status = ffi.new('OM_uint32[1]')
    if input_token:
        input_token_buffer = ffi.new('gss_buffer_desc[1]')
        input_token_buffer[0].length = len(input_token)
        # Keep a reference so the C string outlives the call below.
        c_str_input_token = ffi.new('char[]', input_token)
        input_token_buffer[0].value = c_str_input_token
    else:
        # First call: no token from the acceptor yet.
        input_token_buffer = ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER)
    if isinstance(self._desired_mech, OID):
        desired_mech = ffi.addressof(self._desired_mech._oid)
    else:
        desired_mech = ffi.cast('gss_OID', C.GSS_C_NO_OID)
    actual_mech = ffi.new('gss_OID[1]')
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    actual_flags = ffi.new('OM_uint32[1]')
    actual_time = ffi.new('OM_uint32[1]')
    if self._cred_object is not None:
        cred = self._cred_object._cred[0]
    else:
        cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL)
    retval = C.gss_init_sec_context(
        minor_status,
        cred,
        self._ctx,
        self.peer_name._name[0],
        desired_mech,
        self._req_flags,
        self._time_req,
        self._channel_bindings,
        input_token_buffer,
        actual_mech,
        output_token_buffer,
        actual_flags,
        actual_time
    )
    try:
        if output_token_buffer[0].length != 0:
            out_token = _buf_to_str(output_token_buffer[0])
        else:
            out_token = None
        if GSS_ERROR(retval):
            # Pass the output token along with the exception; some mechanisms
            # emit an error token that should be sent to the peer.
            if minor_status[0] and actual_mech[0]:
                raise _exception_for_status(retval, minor_status[0], actual_mech[0], out_token)
            else:
                raise _exception_for_status(retval, minor_status[0], None, out_token)
        # GSS_S_CONTINUE_NEEDED means more round trips are required.
        self.established = not (retval & C.GSS_S_CONTINUE_NEEDED)
        self.flags = actual_flags[0]
        if actual_mech[0]:
            self.mech_type = OID(actual_mech[0][0])
        return out_token
    except:
        # Establishment failed: tear down any partially-built context.
        if self._ctx[0]:
            C.gss_delete_sec_context(
                minor_status,
                self._ctx,
                ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER)
            )
            self._reset_flags()
        raise
    finally:
        # Release the library-allocated output buffer in all cases.
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
def step(self, input_token):
    """Performs a step to establish the context as an acceptor.

    This method should be called in a loop and fed input tokens from the initiator, and its
    output tokens should be sent to the initiator, until this context's :attr:`established`
    attribute is True.

    :param input_token: The input token from the initiator (required).
    :type input_token: bytes
    :returns: either a byte string with the next token to send to the initiator,
        or None if there is no further token to send to the initiator.
    :raises: :exc:`~gssapi.error.GSSException` if there is an error establishing the context.
    """
    minor_status = ffi.new('OM_uint32[1]')
    input_token_buffer = ffi.new('gss_buffer_desc[1]')
    input_token_buffer[0].length = len(input_token)
    # Keep a reference so the C string outlives the call below.
    c_str_import_token = ffi.new('char[]', input_token)
    input_token_buffer[0].value = c_str_import_token
    mech_type = ffi.new('gss_OID[1]')
    output_token_buffer = ffi.new('gss_buffer_desc[1]')
    src_name_handle = ffi.new('gss_name_t[1]')
    actual_flags = ffi.new('OM_uint32[1]')
    time_rec = ffi.new('OM_uint32[1]')
    delegated_cred_handle = ffi.new('gss_cred_id_t[1]')
    if self._cred_object is not None:
        cred = self._cred_object._cred[0]
    else:
        cred = ffi.cast('gss_cred_id_t', C.GSS_C_NO_CREDENTIAL)
    retval = C.gss_accept_sec_context(
        minor_status,
        self._ctx,
        cred,
        input_token_buffer,
        self._channel_bindings,
        src_name_handle,
        mech_type,
        output_token_buffer,
        actual_flags,
        time_rec,
        delegated_cred_handle
    )
    if src_name_handle[0]:
        src_name = MechName(src_name_handle, mech_type[0])  # make sure src_name is GC'd
    try:
        if output_token_buffer[0].length != 0:
            out_token = _buf_to_str(output_token_buffer[0])
        else:
            out_token = None
        if GSS_ERROR(retval):
            # Pass the output token along with the exception; some mechanisms
            # emit an error token that should be sent to the peer.
            if minor_status[0] and mech_type[0]:
                raise _exception_for_status(retval, minor_status[0], mech_type[0], out_token)
            else:
                raise _exception_for_status(retval, minor_status[0], None, out_token)
        self.established = not (retval & C.GSS_S_CONTINUE_NEEDED)
        self.flags = actual_flags[0]
        if (self.flags & C.GSS_C_DELEG_FLAG):
            self.delegated_cred = Credential(delegated_cred_handle)
        if mech_type[0]:
            self.mech_type = OID(mech_type[0][0])
            if src_name_handle[0]:
                src_name._mech_type = self.mech_type
                self.peer_name = src_name
        return out_token
    except:
        # BUG FIX: test the dereferenced context pointer (self._ctx[0]) as the
        # initiator-side step() does. The cffi array self._ctx is always
        # truthy, so the old check called gss_delete_sec_context even when the
        # context pointer was still NULL.
        if self._ctx[0]:
            C.gss_delete_sec_context(
                minor_status,
                self._ctx,
                ffi.cast('gss_buffer_t', C.GSS_C_NO_BUFFER)
            )
        self._reset_flags()
        raise
    finally:
        if output_token_buffer[0].length != 0:
            C.gss_release_buffer(minor_status, output_token_buffer)
        # if self.delegated_cred is present, it will handle gss_release_cred:
        if delegated_cred_handle[0] and not self.delegated_cred:
            C.gss_release_cred(minor_status, delegated_cred_handle)
def mechs(self):
    """
    The set of mechanisms supported by the credential.

    :type: :class:`~gssapi.oids.OIDSet`
    """
    # Lazily populate the cache on first access, then serve from it.
    cached = self._mechs
    if not cached:
        cached = self._inquire(False, False, False, True)[3]
        self._mechs = cached
    return cached
def export(self):
    """
    Serializes this credential into a byte string, which can be passed to :meth:`imprt` in
    another process in order to deserialize the byte string back into a credential. Exporting
    a credential does not destroy it.

    :returns: The serialized token representation of this credential.
    :rtype: bytes
    :raises: :exc:`~gssapi.error.GSSException` if there is a problem with exporting the
        credential.
        :exc:`NotImplementedError` if the underlying GSSAPI implementation does not
        support the ``gss_export_cred`` C function.
    """
    if not hasattr(C, 'gss_export_cred'):
        raise NotImplementedError("The GSSAPI implementation does not support gss_export_cred")
    minor = ffi.new('OM_uint32[1]')
    token_buf = ffi.new('gss_buffer_desc[1]')
    major = C.gss_export_cred(minor, self._cred[0], token_buf)
    try:
        if GSS_ERROR(major):
            raise _exception_for_status(major, minor[0])
        return _buf_to_str(token_buf[0])
    finally:
        # The buffer was allocated by the GSSAPI library; let it free it.
        if token_buf[0].length != 0:
            C.gss_release_buffer(minor, token_buf)
def imprt(cls, token):
    """
    Deserializes a byte string token into a :class:`Credential` object. The token must have
    previously been exported by the same GSSAPI implementation as is being used to import it.

    :param token: A token previously obtained from the :meth:`export` of another
        :class:`Credential` object.
    :type token: bytes
    :returns: A :class:`Credential` object constructed from the token.
    :raises: :exc:`~gssapi.error.GSSException` if there is a problem with importing the
        credential.
        :exc:`NotImplementedError` if the underlying GSSAPI implementation does not
        support the ``gss_import_cred`` C function.
    """
    if not hasattr(C, 'gss_import_cred'):
        raise NotImplementedError("The GSSAPI implementation does not support gss_import_cred")
    minor_status = ffi.new('OM_uint32[1]')
    token_buffer = ffi.new('gss_buffer_desc[1]')
    token_buffer[0].length = len(token)
    # Keep a Python reference so the C string stays alive during the call.
    c_str_token = ffi.new('char[]', token)
    token_buffer[0].value = c_str_token
    imported_cred = ffi.new('gss_cred_id_t[1]')
    retval = C.gss_import_cred(minor_status, token_buffer, imported_cred)
    try:
        if GSS_ERROR(retval):
            raise _exception_for_status(retval, minor_status[0])
        # cls takes ownership of (and will release) the credential handle.
        return cls(imported_cred)
    except:
        # On failure, release the handle so it is not leaked.
        _release_gss_cred_id_t(imported_cred)
        raise
def store(self, usage=None, mech=None, overwrite=False, default=False, cred_store=None):
    """
    Stores this credential into a 'credential store'. It can either store this credential in
    the default credential store, or into a specific credential store specified by a set of
    mechanism-specific key-value pairs. The former method of operation requires that the
    underlying GSSAPI implementation supports the ``gss_store_cred`` C function, the latter
    method requires support for the ``gss_store_cred_into`` C function.

    :param usage: Optional parameter specifying whether to store the initiator, acceptor, or
        both usages of this credential. Defaults to the value of this credential's
        :attr:`usage` property.
    :type usage: One of :data:`~gssapi.C_INITIATE`, :data:`~gssapi.C_ACCEPT` or
        :data:`~gssapi.C_BOTH`
    :param mech: Optional parameter specifying a single mechanism to store the credential
        element for. If not supplied, all mechanisms' elements in this credential will be
        stored.
    :type mech: :class:`~gssapi.oids.OID`
    :param overwrite: If True, indicates that any credential for the same principal in the
        credential store should be overwritten with this credential.
    :type overwrite: bool
    :param default: If True, this credential should be made available as the default
        credential when stored, for acquisition when no `desired_name` parameter is passed
        to :class:`Credential` or for use when no credential is passed to
        :class:`~gssapi.ctx.InitContext` or :class:`~gssapi.ctx.AcceptContext`. This is only
        an advisory parameter to the GSSAPI implementation.
    :type default: bool
    :param cred_store: Optional dict or list of (key, value) pairs indicating the credential
        store to use. The interpretation of these values will be mechanism-specific.
    :type cred_store: dict, or list of (str, str)
    :returns: A pair of values indicating the set of mechanism OIDs for which credential
        elements were successfully stored, and the usage of the credential that was stored.
    :rtype: tuple(:class:`~gssapi.oids.OIDSet`, int)
    :raises: :exc:`~gssapi.error.GSSException` if there is a problem with storing the
        credential.
        :exc:`NotImplementedError` if the underlying GSSAPI implementation does not
        support the ``gss_store_cred`` or ``gss_store_cred_into`` C functions.
    """
    if usage is None:
        usage = self.usage
    if isinstance(mech, OID):
        oid_ptr = ffi.addressof(mech._oid)
    else:
        # No specific mechanism requested: store all elements.
        oid_ptr = ffi.cast('gss_OID', C.GSS_C_NO_OID)
    minor_status = ffi.new('OM_uint32[1]')
    elements_stored = ffi.new('gss_OID_set[1]')
    usage_stored = ffi.new('gss_cred_usage_t[1]')
    if cred_store is None:
        # Default credential store path (requires gss_store_cred).
        if not hasattr(C, 'gss_store_cred'):
            raise NotImplementedError("The GSSAPI implementation does not support "
                                      "gss_store_cred")
        retval = C.gss_store_cred(
            minor_status,
            self._cred[0],
            ffi.cast('gss_cred_usage_t', usage),
            oid_ptr,
            ffi.cast('OM_uint32', overwrite),
            ffi.cast('OM_uint32', default),
            elements_stored,
            usage_stored
        )
    else:
        # Explicit credential store path (requires gss_store_cred_into).
        if not hasattr(C, 'gss_store_cred_into'):
            raise NotImplementedError("The GSSAPI implementation does not support "
                                      "gss_store_cred_into")
        # c_strings must stay referenced so the key/value buffers stay alive.
        c_strings, elements, cred_store_kv_set = _make_kv_set(cred_store)
        retval = C.gss_store_cred_into(
            minor_status,
            self._cred[0],
            ffi.cast('gss_cred_usage_t', usage),
            oid_ptr,
            ffi.cast('OM_uint32', overwrite),
            ffi.cast('OM_uint32', default),
            cred_store_kv_set,
            elements_stored,
            usage_stored
        )
    try:
        if GSS_ERROR(retval):
            # oid_ptr is NULL (falsy) when no specific mechanism was given.
            if oid_ptr:
                raise _exception_for_status(retval, minor_status[0], oid_ptr)
            else:
                raise _exception_for_status(retval, minor_status[0])
    except:
        # On error, release the OID set before propagating.
        if elements_stored[0]:
            C.gss_release_oid_set(minor_status, elements_stored)
        raise
    # NOTE(review): OIDSet is passed positionally here while get_all_mechs
    # uses OIDSet(oid_set=...) — presumably the first positional parameter is
    # the oid_set; verify against the OIDSet constructor.
    return (OIDSet(elements_stored), usage_stored[0])
def get_all_mechs():
    """
    Return an :class:`OIDSet` of all the mechanisms supported by the underlying GSSAPI
    implementation.
    """
    minor = ffi.new('OM_uint32[1]')
    supported = ffi.new('gss_OID_set[1]')
    try:
        major = C.gss_indicate_mechs(minor, supported)
        if GSS_ERROR(major):
            raise _exception_for_status(major, minor[0])
    except:
        # Don't leak the OID set if wrapping/raising fails.
        _release_OID_set(supported)
        raise
    return OIDSet(oid_set=supported)
def mech_from_string(input_string):
    """
    Takes a string form of a mechanism OID, in dot-separated: "1.2.840.113554.1.2.2" or numeric
    ASN.1: "{1 2 840 113554 1 2 2}" notation, and returns an :class:`OID` object representing
    the mechanism, which can be passed to other GSSAPI methods.

    :param input_string: a string representing the desired mechanism OID.
    :returns: the mechanism OID.
    :rtype: :class:`OID`
    :raises: ValueError if the input string is ill-formatted.
    :raises: KeyError if the mechanism identified by the string is not supported by the
        underlying GSSAPI implementation.
    """
    # Normalize the ASN.1 braced form to dotted notation first.
    if re.match(r'^\d+(\.\d+)*$', input_string) is None:
        if re.match(r'^\{\d+( \d+)*\}$', input_string) is None:
            raise ValueError(input_string)
        input_string = ".".join(input_string[1:-1].split())
    # Linear scan over the implementation's supported mechanisms.
    for mech in get_all_mechs():
        if str(mech) == input_string:
            return mech
    raise KeyError("Unknown mechanism: {0}".format(input_string))
def singleton_set(cls, single_oid):
    """
    Factory function to create a new :class:`OIDSet` with a single member.

    :param single_oid: the OID to use as a member of the new set
    :type single_oid: :class:`OID`
    :returns: an OID set with the OID passed in as the only member
    :rtype: :class:`OIDSet`
    """
    new_set = cls()
    # Resolve the argument to a gss_OID pointer: accept the Python wrapper,
    # a raw gss_OID_desc struct, or a gss_OID pointer.
    member_ptr = None
    if isinstance(single_oid, OID):
        member_ptr = ffi.addressof(single_oid._oid)
    elif isinstance(single_oid, ffi.CData):
        cdata_type = ffi.typeof(single_oid)
        if cdata_type == ffi.typeof('gss_OID_desc'):
            member_ptr = ffi.addressof(single_oid)
        elif cdata_type == ffi.typeof('gss_OID'):
            member_ptr = single_oid
    if member_ptr is None:
        raise TypeError("Expected a gssapi.oids.OID, got " + str(type(single_oid)))
    minor = ffi.new('OM_uint32[1]')
    major = C.gss_add_oid_set_member(minor, member_ptr, new_set._oid_set)
    if GSS_ERROR(major):
        raise _exception_for_status(major, minor[0])
    return new_set
def add(self, new_oid):
    """
    Adds another :class:`OID` to this set.

    :param new_oid: the OID to add.
    :type new_oid: :class:`OID`
    """
    if self._oid_set[0]:
        # Resolve the argument to a gss_OID pointer: accept the Python
        # wrapper, a raw gss_OID_desc struct, or a gss_OID pointer.
        oid_ptr = None
        if isinstance(new_oid, OID):
            oid_ptr = ffi.addressof(new_oid._oid)
        elif isinstance(new_oid, ffi.CData):
            if ffi.typeof(new_oid) == ffi.typeof('gss_OID_desc'):
                oid_ptr = ffi.addressof(new_oid)
            elif ffi.typeof(new_oid) == ffi.typeof('gss_OID'):
                oid_ptr = new_oid
        if oid_ptr is None:
            raise TypeError("Expected a gssapi.oids.OID, got " + str(type(new_oid)))
        minor_status = ffi.new('OM_uint32[1]')
        retval = C.gss_add_oid_set_member(minor_status, oid_ptr, self._oid_set)
        if GSS_ERROR(retval):
            raise _exception_for_status(retval, minor_status[0])
    else:
        raise GSSException("Cannot add a member to this OIDSet, its gss_OID_set is NULL!")
def main(properties=properties, options=options, **custom_options):
    """Imports and runs setup function with given properties."""
    # custom_options take precedence over the module-level options.
    merged_options = dict(options, **custom_options)
    setup_fn = init(**merged_options)
    return setup_fn(**properties)
def init(
    dist='dist',
    minver=None,
    maxver=None,
    use_markdown_readme=True,
    use_stdeb=False,
    use_distribute=False,
):
    """Imports and returns a setup function.

    If use_markdown_readme is set,
    then README.md is added to setuptools READMES list.

    If use_stdeb is set on a Debian based system,
    then module stdeb is imported.
    Stdeb supports building deb packages on Debian based systems.
    The package should only be installed on the same system version
    it was built on, though. See http://github.com/astraw/stdeb.

    If use_distribute is set, then distribute_setup.py is imported.

    :param dist: directory distribute_setup downloads into (only with use_distribute).
    :param minver: minimum supported Python version string, inclusive, or None for no bound.
    :param maxver: maximum supported Python version string, exclusive, or None for no bound.
    :returns: the ``setup`` callable from setuptools (or distutils as fallback).
    """
    # Only run the version gate when at least one bound was supplied.
    if minver is not None or maxver is not None:
        import sys
        # BUG FIX: the old check used `minver <= sys.version` directly, which
        # raises TypeError on Python 3 when minver is None. An absent bound is
        # now treated as unbounded: '' sorts before every version string and
        # 'Any' sorts after numeric version strings.
        if not (minver or '') <= sys.version < (maxver or 'Any'):
            sys.stderr.write(
                '%s: requires python version in <%s, %s), not %s\n' % (
                    sys.argv[0], minver or 'any', maxver or 'any', sys.version.split()[0]))
            sys.exit(1)
    if use_distribute:
        from distribute_setup import use_setuptools
        use_setuptools(to_dir=dist)
        from setuptools import setup
    else:
        try:
            from setuptools import setup
        except ImportError:
            # Fall back to the legacy distutils (pre-3.12 interpreters).
            from distutils.core import setup
    if use_markdown_readme:
        try:
            import setuptools.command.sdist
            # Append README.md to whatever READMES the installed setuptools has.
            setuptools.command.sdist.READMES = tuple(
                list(getattr(setuptools.command.sdist, 'READMES', ())) + ['README.md'])
        except ImportError:
            pass
    if use_stdeb:
        import platform
        # NOTE(review): platform.dist() was removed in Python 3.8; this branch
        # only works on older interpreters.
        if 'debian' in platform.dist():
            try:
                import stdeb
            except ImportError:
                pass
    return setup
def main(context=None, *args, **kwargs):
    """
    Entry point: load microphone settings, pick an audio-driver plugin, and run it.

    kwargs:
        'command_publish_address': in the form of `tcp://*:5555`
            or any other zeromq address format. IE `ipc://*:5555`
        'command_subscribe_address': in the form of `tcp://*:5555`
            or any other zeromq address format. IE `ipc://*:5555`
        'audio_publish_address': in the form of `tcp://*:5555`
            or any other zeromq address format. IE `ipc://*:5555`
    """
    # Get configuration
    # NOTE(review): this deliberately shadows the *args parameter with the
    # parsed command-line arguments — positional args are effectively ignored.
    args = _get_command_line_args()
    # Get settings filepath
    settings_filepath = args.get('settings_path')
    # Get settings using the settings filepath
    settings = _get_settings(settings_filepath)
    settings = settings.get('microphone', {})
    # TODO: verify that this doesn't break things in new and interesting ways
    settings.update(kwargs)
    plugin_manager = pluginmanager.PluginInterface()
    plugin_manager.set_entry_points('microphone.audioengines')
    plugins = plugin_manager.collect_entry_point_plugins(return_dict=True)
    # find the audio driver or stick to the default of `pyaudio`
    audio_driver = settings.get('audio_driver', 'pyaudio')
    try:
        # NOTE: `AudioDriver` is a class
        AudioDriver = plugins[audio_driver]
    # Fail early if we can't find the plugin we're looking for
    except KeyError:
        # BUG FIX: the message contained a bare `{}` placeholder but .format()
        # was never called; fill in the settings file path.
        logging.error('Audio driver set in microphone settings of {} not found.'
                      ' Please install or fix your settings file.'
                      .format(settings_filepath))
        logging.error('Plugins available: {}'.format(list(plugins.keys())))
        sys.exit(1)
    # TODO: Assume that a computer will only use one audio driver?
    # Also assume that microphones may be physcially displaced from each other
    # which means that they might record simultaneously
    # FIXME: these are not good default addresses
    command_publish_address = settings.get('publish_address',
                                           'tcp://127.0.0.1:6910')
    command_subscribe_address = settings.get('subscribe_address',
                                             'tcp://127.0.0.1:6823')
    audio_publish_address = settings.get('audio_publish_address',
                                         'tcp://127.0.0.1:5012')
    messaging = Messaging(command_publish_address,
                          command_subscribe_address,
                          audio_publish_address)
    audio_driver = AudioDriver(messaging, settings)
    audio_driver.run()
def _create_file():
    """
    Yield a wave file handle configured for recording audio.

    Opens ``audio.wav`` for writing, configures it for 2-channel 16-bit audio
    at the default input device's sample rate, yields the handle, and closes
    it when the caller is done.
    NOTE(review): this is a generator used with yield/finally cleanup —
    presumably decorated with @contextmanager at the call site; confirm.
    """
    f = wave.open('audio.wav', mode='wb')
    f.setnchannels(2)
    p = pyaudio.PyAudio()
    try:
        f.setsampwidth(p.get_sample_size(pyaudio.paInt16))
        f.setframerate(p.get_default_input_device_info()['defaultSampleRate'])
    finally:
        # BUG FIX: the PyAudio instance was never terminated, leaking the
        # underlying PortAudio resources. It is only needed for the two
        # queries above, so release it immediately.
        p.terminate()
    try:
        yield f
    finally:
        f.close()
def get_devices(self, device_type='all'):
    """
    Enumerate PyAudio devices and cache them by name.

    :param device_type: currently unused — the filtering logic is disabled
        (see the commented-out block below).
    :returns: dict mapping device name to PyAudioDevice, including any
        devices cached by previous calls.
    """
    num_devices = self._pyaudio.get_device_count()
    self._logger.debug('Found %d PyAudio devices', num_devices)
    for i in range(num_devices):
        info = self._pyaudio.get_device_info_by_index(i)
        name = info['name']
        # Skip devices already cached from a previous enumeration.
        if name in self.devices:
            continue
        else:
            self.devices[name] = PyAudioDevice(self, info)
    return self.devices
    # The block below is disabled (dead string literal after the return):
    # it would filter the result by device_type if re-enabled.
    """
    if device_type == plugin.audioengine.DEVICE_TYPE_ALL:
        return devs
    else:
        return [device for device in devs if device_type in device.types]
    """
def open_stream(self,
                bits,
                channels,
                rate=None,
                chunksize=1024,
                output=True):
    """
    Yield an open PyAudio stream on this device, closing it afterwards.

    NOTE(review): this is a generator using yield/finally cleanup —
    presumably decorated with @contextmanager at the call site; confirm.

    :param bits: sample width in bits (e.g. 16).
    :param channels: number of channels.
    :param rate: sample rate in Hz; defaults to the device's default rate.
    :param chunksize: frames per buffer for output streams (input streams
        use chunksize*8 — see the 'Hacky' note below).
    :param output: True to open an output stream, False for input.
    :raises: plugin.audioengine.UnsupportedFormat if the device does not
        support the requested format.
    """
    if rate is None:
        rate = int(self.info['defaultSampleRate'])
    # Check if format is supported
    is_supported_fmt = self.supports_format(bits, channels, rate,
                                            output=output)
    if not is_supported_fmt:
        # The %s is filled with the direction first; the {} placeholders are
        # filled by .format() below.
        msg_fmt = ("PyAudioDevice {index} ({name}) doesn't support " +
                   "%s format (Int{bits}, {channels}-channel at" +
                   " {rate} Hz)") % ('output' if output else 'input')
        msg = msg_fmt.format(index=self.index,
                             name=self.name,
                             bits=bits,
                             channels=channels,
                             rate=rate)
        self._logger.critical(msg)
        raise plugin.audioengine.UnsupportedFormat(msg)
    # Everything looks fine, open the stream
    direction = ('output' if output else 'input')
    stream_kwargs = {
        'format': bits_to_samplefmt(bits),
        'channels': channels,
        'rate': rate,
        'output': output,
        'input': not output,
        # NOTE(review): uses self._index here but self.index in the error
        # message above — confirm which attribute is canonical.
        ('%s_device_index' % direction): self._index,
        'frames_per_buffer': chunksize if output else chunksize*8  # Hacky
    }
    stream = self._engine._pyaudio.open(**stream_kwargs)
    # Disabled debug logging (dead string literal):
    """
    self._logger.debug("%s stream opened on device '%s' (%d Hz, %d " +
                       "channel, %d bit)", "output" if output else "input",
                       self.slug, rate, channels, bits)
    """
    try:
        yield stream
    finally:
        stream.close()
        # Disabled debug logging (dead string literal):
        """
        self._logger.debug("%s stream closed on device '%s'",
                           "output" if output else "input", self.slug)
        """
def djfrontend_h5bp_css(version=None):
    """
    Return the HTML5 Boilerplate CSS link tag.

    Included in HTML5 Boilerplate.
    """
    resolved = version if version is not None else getattr(
        settings, 'DJFRONTEND_H5BP_CSS', DJFRONTEND_H5BP_CSS_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{0}djfrontend/css/h5bp/{1}/h5bp.css">',
        _static_url, resolved)
def djfrontend_normalize(version=None):
    """
    Return the Normalize CSS link tag.

    Included in HTML5 Boilerplate.
    """
    resolved = version if version is not None else getattr(
        settings, 'DJFRONTEND_NORMALIZE', DJFRONTEND_NORMALIZE_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{0}djfrontend/css/normalize/{1}/normalize.css">',
        _static_url, resolved)
def djfrontend_fontawesome(version=None):
    """
    Return the Font Awesome CSS link tag.

    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    resolved = version if version is not None else getattr(
        settings, 'DJFRONTEND_FONTAWESOME', DJFRONTEND_FONTAWESOME_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{0}djfrontend/css/fontawesome/{1}/font-awesome{2}.css">',
        _static_url, resolved, _min)
def djfrontend_modernizr(version=None):
    """
    Return the Modernizr script tag(s) for the given version.

    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    Included in HTML5 Boilerplate.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_MODERNIZR', DJFRONTEND_MODERNIZR_DEFAULT)
    debug = getattr(settings, 'TEMPLATE_DEBUG', False)
    template = (
        '<script src="{static}djfrontend/js/modernizr/{v}/modernizr.js"></script>'
        if debug else
        '<script src="//cdnjs.cloudflare.com/ajax/libs/modernizr/{v}/modernizr.min.js"></script>\n'
        '<script>window.Modernizr || document.write(\'<script src="{static}djfrontend/js/modernizr/{v}/modernizr.min.js"><\/script>\')</script>'
    )
    return format_html(template, static=_static_url, v=version)
def djfrontend_jquery(version=None):
    """
    Return the jQuery script tag(s) for the given version.

    TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN
    with local fallback. Included in HTML5 Boilerplate.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY', DJFRONTEND_JQUERY_DEFAULT)
    debug = getattr(settings, 'TEMPLATE_DEBUG', False)
    template = (
        '<script src="{static}djfrontend/js/jquery/{v}/jquery.js"></script>'
        if debug else
        '<script src="//ajax.googleapis.com/ajax/libs/jquery/{v}/jquery.min.js"></script>'
        '<script>window.jQuery || document.write(\'<script src="{static}djfrontend/js/jquery/{v}/jquery.min.js"><\/script>\')</script>'
    )
    return format_html(template, static=_static_url, v=version)
def djfrontend_jqueryui(version=None):
    """
    Returns the jQuery UI plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file from Google CDN with local fallback.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERYUI', DJFRONTEND_JQUERYUI_DEFAULT)
    if getattr(settings, 'TEMPLATE_DEBUG', False):
        # CONSISTENCY FIX: every other tag in this file builds local URLs from
        # the module-level _static_url; this branch read settings.STATIC_URL
        # directly, which diverges when a custom static prefix is configured.
        return format_html(
            '<script src="{0}djfrontend/js/jquery/jqueryui/{1}/jquery-ui.js"></script>',
            _static_url, version)
    else:
        return format_html(
            '<script src="//ajax.googleapis.com/ajax/libs/jqueryui/{v}/jquery-ui.min.js"></script>'
            '<script>window.jQuery.ui || document.write(\'<script src="{static}djfrontend/js/jquery/jqueryui/{v}/jquery-ui.min.js"><\/script>\')</script>',
            static=_static_url, v=version)
def djfrontend_jquery_datatables(version=None):
    """
    Return the jQuery DataTables plugin script tag(s).

    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        # An explicit DJFRONTEND_JQUERY_DATATABLES setting wins; otherwise
        # fall back to the shared DataTables version setting/default.
        configured = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES', False)
        version = configured or getattr(
            settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION',
            DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
    debug = getattr(settings, 'TEMPLATE_DEBUG', False)
    template = (
        '<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.js"></script>'
        if debug else
        '<script src="//cdnjs.cloudflare.com/ajax/libs/datatables/{v}/jquery.dataTables.min.js"></script>'
        '<script>window.jQuery.fn.DataTable || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.dataTables/{v}/jquery.dataTables.min.js"><\/script>\')</script>'
    )
    return format_html(template, static=_static_url, v=version)
def djfrontend_jquery_datatables_css(version=None):
    """
    Return the jQuery DataTables CSS link tag according to version number.
    """
    if version is None:
        # An explicit CSS version setting wins; otherwise fall back to the
        # shared DataTables version setting/default.
        configured = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_CSS', False)
        version = configured or getattr(
            settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION',
            DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/jquery/jquery.dataTables/{v}/jquery.dataTables{min}.css">',
        static=_static_url, v=version, min=_min)
def djfrontend_jquery_datatables_themeroller(version=None):
    """
    Returns the jQuery DataTables ThemeRoller CSS file according to version number.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_THEMEROLLER', False):
            version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_VERSION', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
        else:
            version = getattr(settings, 'DJFRONTEND_JQUERY_DATATABLES_THEMEROLLER', DJFRONTEND_JQUERY_DATATABLES_VERSION_DEFAULT)
    # BUG FIX: the markup contained a doubled attribute (href="href="...),
    # which produced an invalid <link> tag with an empty href.
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/jquery/jquery.dataTables/{v}/jquery.dataTables_themeroller.min.css">',
        static=_static_url, v=version)
def djfrontend_jquery_formset(version=None):
    """
    Return the jQuery Dynamic Formset plugin script tag(s).

    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_FORMSET', DJFRONTEND_JQUERY_FORMSET_DEFAULT)
    debug = getattr(settings, 'TEMPLATE_DEBUG', False)
    template = (
        '<script src="{static}djfrontend/js/jquery/jquery.formset/{v}/jquery.formset.js"></script>'
        if debug else
        '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery.formset/{v}/jquery.formset.min.js"></script>\n'
        '<script>window.jQuery.fn.formset || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.formset/{v}/jquery.formset.min.js"><\/script>\')</script>'
    )
    return format_html(template, static=_static_url, v=version)
def djfrontend_jquery_scrollto(version=None):
    """
    Return the jQuery ScrollTo plugin script tag(s).

    TEMPLATE_DEBUG returns full file, otherwise returns minified file.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_SCROLLTO', DJFRONTEND_JQUERY_SCROLLTO_DEFAULT)
    debug = getattr(settings, 'TEMPLATE_DEBUG', False)
    template = (
        '<script src="{static}djfrontend/js/jquery/jquery.scrollTo/{v}/jquery.scrollTo.js"></script>'
        if debug else
        '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-scrollTo/{v}/jquery.scrollTo.min.js"></script>'
        '<script>window.jQuery.fn.scrollTo || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.scrollTo/{v}/jquery.scrollTo.min.js"><\/script>\')</script>'
    )
    return format_html(template, static=_static_url, v=version)
def djfrontend_jquery_smoothscroll(version=None):
    """
    Returns the jQuery Smooth Scroll plugin file according to version number.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.

    :param version: plugin version; defaults to the
        DJFRONTEND_JQUERY_SMOOTHSCROLL setting, then the module default.
    """
    if version is None:
        version = getattr(settings, 'DJFRONTEND_JQUERY_SMOOTHSCROLL', DJFRONTEND_JQUERY_SMOOTHSCROLL_DEFAULT)
    if getattr(settings, 'TEMPLATE_DEBUG', False):
        # debug: serve the unminified local copy
        template = '<script src="{static}djfrontend/js/jquery/jquery.smooth-scroll/{v}/jquery.smooth-scroll.js"></script>'
    else:
        # production: CDN copy with a local minified fallback
        template = (
            '<script src="//cdnjs.cloudflare.com/ajax/libs/jquery-smooth-scroll/{v}/jquery.smooth-scroll.min.js"></script>'
            '<script>window.jQuery.fn.smoothScroll || document.write(\'<script src="{static}djfrontend/js/jquery/jquery.smooth-scroll/{v}/jquery.smooth-scroll.min.js"><\/script>\')</script>')
    return format_html(template, static=_static_url, v=version)
def djfrontend_twbs_css(version=None):
    """
    Returns Twitter Bootstrap CSS file.
    TEMPLATE_DEBUG returns full file, otherwise returns minified file.

    Version resolution: explicit `version` arg, then the
    DJFRONTEND_TWBS_CSS setting, then DJFRONTEND_TWBS_VERSION, then the
    module default.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_TWBS_CSS', False):
            version = getattr(settings, 'DJFRONTEND_TWBS_VERSION', DJFRONTEND_TWBS_VERSION_DEFAULT)
        else:
            version = getattr(settings, 'DJFRONTEND_TWBS_CSS', DJFRONTEND_TWBS_VERSION_DEFAULT)
    # `_min` is a module-level value; presumably '.min' in production and
    # '' under TEMPLATE_DEBUG -- confirm at its definition.
    return format_html(
        '<link rel="stylesheet" href="{static}djfrontend/css/twbs/{v}/bootstrap{min}.css">',
        static=_static_url, v=version, min=_min)
def djfrontend_twbs_js(version=None, files=None):
    """
    Returns Twitter Bootstrap JavaScript file(s).
    all returns concatenated file; full file for TEMPLATE_DEBUG, minified otherwise.
    Other choices are:
        affix, alert, button, carousel, collapse, dropdown, modal,
        popover (adds tooltip if not included), scrollspy, tab,
        tooltip, transition.
    Individual files are not minified.

    :param version: Bootstrap version; falls back to the
        DJFRONTEND_TWBS_JS_VERSION / DJFRONTEND_TWBS_VERSION settings.
    :param files: 'all' or a space-separated list of component names;
        falls back to the DJFRONTEND_TWBS_JS_FILES setting.
    """
    if version is None:
        if not getattr(settings, 'DJFRONTEND_TWBS_JS_VERSION', False):
            version = getattr(settings, 'DJFRONTEND_TWBS_VERSION', DJFRONTEND_TWBS_VERSION_DEFAULT)
        else:
            version = getattr(settings, 'DJFRONTEND_TWBS_JS_VERSION', DJFRONTEND_TWBS_VERSION_DEFAULT)
    if files:
        if files != 'all':
            files = files.split(' ')
    elif getattr(settings, 'DJFRONTEND_TWBS_JS_FILES', False) and settings.DJFRONTEND_TWBS_JS_FILES != 'all':
        files = settings.DJFRONTEND_TWBS_JS_FILES.split(' ')
    else:
        files = 'all'
    if files == 'all':
        # concatenated build: CDN copy with local minified fallback
        return format_html(
            '<script src="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/{v}/js/bootstrap.min.js"></script>\n'
            '<script>window.jQuery.fn.scrollspy || document.write(\'<script src="{static}djfrontend/js/twbs/{v}/bootstrap.min.js"><\/script>\')</script>',
            v=version, static=_static_url)
    # popover depends on tooltip, so pull tooltip in if it was omitted
    if 'popover' in files and 'tooltip' not in files:
        files.append('tooltip')
    # BUG FIX: the original wrapped this comprehension in a redundant
    # `for file in files:` loop that rebuilt the same list on every pass
    # (and shadowed the `file` builtin).
    tags = [
        '<script src="%sdjfrontend/js/twbs/%s/%s.js"></script>' %
        (_static_url, version, name)
        for name in files
    ]
    return mark_safe('\n'.join(tags))
def djfrontend_ga(account=None):
    """
    Returns Google Analytics asynchronous snippet.
    Use DJFRONTEND_GA_SETDOMAINNAME to set domain for multiple, or cross-domain tracking.
    Set DJFRONTEND_GA_SETALLOWLINKER to use _setAllowLinker method on target site for cross-domain tracking.
    Included in HTML5 Boilerplate.

    :param account: GA account id; defaults to the DJFRONTEND_GA setting.
        Returns '' when no account is configured or TEMPLATE_DEBUG is on.
    """
    if account is None:
        account = getattr(settings, 'DJFRONTEND_GA', False)
    if account:
        # never emit tracking while developing (TEMPLATE_DEBUG on)
        if getattr(settings, 'TEMPLATE_DEBUG', False):
            return ''
        else:
            if getattr(settings, 'DJFRONTEND_GA_SETDOMAINNAME', False):
                if getattr(settings, 'DJFRONTEND_GA_SETALLOWLINKER', False):
                    # cross-domain tracking with the linker plugin enabled
                    return mark_safe(
                        '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("require", "linker");ga("linker:autoLink", ["%s"]);ga("create", "%s", "auto", {"allowLinker": true});ga("send", "pageview");</script>' %
                        (settings.DJFRONTEND_GA_SETDOMAINNAME, account))
                else:
                    # multi/cross-domain tracking with an explicit domain name
                    return mark_safe(
                        '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("create", "%s", "%s");ga("send", "pageview");</script>' %
                        (account, settings.DJFRONTEND_GA_SETDOMAINNAME))
            else:
                # plain single-domain tracking
                return mark_safe(
                    '<script>(function(i,s,o,g,r,a,m){i["GoogleAnalyticsObject"]=r;i[r]=i[r]||function(){(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)})(window,document,"script","//www.google-analytics.com/analytics.js","ga");ga("create", "%s", "auto");ga("send", "pageview");</script>' % account)
    else:
        return ''
def render(self, name, value, attrs=None):
    u"""Render the textarea plus a CodeMirror activation script."""
    if self.js_var_format is None:
        js_var_bit = ''
    else:
        # optionally capture the editor in a named JS variable
        js_var_bit = 'var %s = ' % (self.js_var_format % name)
    widget_html = super(CodeMirrorTextarea, self).render(name, value, attrs)
    activation = (
        '<script type="text/javascript">%sCodeMirror.fromTextArea(document.getElementById(%s), %s);</script>' %
        (js_var_bit, '"id_%s"' % name, self.option_json)
    )
    return mark_safe('\n'.join([widget_html, activation]))
def iter_auth_hashes(user, purpose, minutes_valid):
    """
    Generate auth tokens tied to user and specified purpose.
    The hash expires at midnight on the minute of now + minutes_valid, such
    that when minutes_valid=1 you get *at least* 1 minute to use the token.
    """
    start = timezone.now().replace(microsecond=0, second=0)
    for offset in range(minutes_valid + 1):
        moment = start - datetime.timedelta(minutes=offset)
        digest = hashlib.sha1(
            '%s:%s:%s:%s:%s' % (
                moment,
                user.password,
                purpose,
                user.pk,
                settings.SECRET_KEY,
            ),
        )
        yield digest.hexdigest()
def calc_expiry_time(minutes_valid):
    """Return the specific time at which an auth_hash will expire."""
    expiry = timezone.now() + datetime.timedelta(minutes=minutes_valid + 1)
    # truncate to the minute, matching iter_auth_hashes
    return expiry.replace(second=0, microsecond=0)
def get_user_token(user, purpose, minutes_valid):
    """Return login token info (id, token, tokenExpires) for given user."""
    payload = dumps([
        user.get_username(),
        get_auth_hash(user, purpose),
    ])
    # base64-encode, stripping the newlines the codec inserts
    token = payload.encode('base64').replace('\n', '')
    return {
        'id': get_meteor_id(user),
        'token': token,
        'tokenExpires': calc_expiry_time(minutes_valid),
    }
def serialize(self, obj, *args, **kwargs):
    """Serialize user as per Meteor accounts serialization.

    Re-shapes the default serialization so the Meteor-standard keys
    (`username`, `emails`, `profile`, `permissions`, `createdAt`) sit at
    the top level of `fields`, with all remaining model fields pushed
    down into `profile`.
    """
    # use default serialization, then modify to suit our needs.
    data = super(Users, self).serialize(obj, *args, **kwargs)
    # everything that isn't handled explicitly ends up in `profile`
    profile = data.pop('fields')
    profile.setdefault('name', obj.get_full_name())
    fields = data['fields'] = {
        'username': obj.get_username(),
        'emails': [],
        'profile': profile,
        'permissions': sorted(self.model.get_all_permissions(obj)),
    }
    # clear out sensitive data
    for sensitive in [
        'password',
        'user_permissions_ids',
        'is_active',
        'is_staff',
        'is_superuser',
        'groups_ids',
    ]:
        profile.pop(sensitive, None)
    # createdAt (default is django.contrib.auth.models.User.date_joined)
    try:
        fields['createdAt'] = profile.pop('date_joined')
    except KeyError:
        # custom user models may expose get_date_joined() or date_joined
        date_joined = getattr(
            obj, 'get_date_joined',
            lambda: getattr(obj, 'date_joined', None)
        )()
        if date_joined:
            fields['createdAt'] = date_joined
    # email (default is django.contrib.auth.models.User.email)
    try:
        email = profile.pop('email')
    except KeyError:
        # custom user models may expose get_email() or email
        email = getattr(
            obj, 'get_email',
            lambda: getattr(obj, 'email', None)
        )()
    if email:
        # emails taken from the user model are marked verified
        fields['emails'].append({'address': email, 'verified': True})
    return data
def deserialize_profile(profile, key_prefix='', pop=False):
    """De-serialize user profile fields into concrete model fields.

    :param profile: dict of profile keys (e.g. 'profile.name') to values.
    :param key_prefix: prefix expected on every profile key.
    :param pop: when True, remove recognised keys from `profile` in place.
    :returns: dict of model field name -> value.
    :raises MeteorError: (400) for any unrecognised key.
    """
    result = {}
    if pop:
        getter = profile.pop
    else:
        getter = profile.get

    def prefixed(name):
        """Return name prefixed by `key_prefix`."""
        return '%s%s' % (key_prefix, name)

    # snapshot the keys: with pop=True the dict is mutated while we walk
    # it, which raises RuntimeError on Python 3 without the list() copy.
    for key in list(profile.keys()):
        val = getter(key)
        if key == prefixed('name'):
            result['full_name'] = val
        else:
            raise MeteorError(400, 'Bad profile key: %r' % key)
    return result
def update(self, selector, update, options=None):
    """Update user data.

    Only `profile.*` keys from the `$set` document may be written; any
    keys left over after profile extraction are rejected with a 400.
    """
    # we're ignoring the `options` argument at this time
    del options
    user = get_object(
        self.model, selector['_id'],
        pk=this.user_id,
    )
    # pop=True strips recognised profile keys out of update['$set']
    profile_update = self.deserialize_profile(
        update['$set'], key_prefix='profile.', pop=True,
    )
    if len(update['$set']) != 0:
        # BUG FIX: the %r placeholder was never filled in, so the error
        # message literally contained '%r' instead of the bad fields.
        raise MeteorError(
            400, 'Invalid update fields: %r' % (update['$set'],),
        )
    for key, val in profile_update.items():
        setattr(user, key, val)
    user.save()
def user_factory(self):
    """Retrieve the current user (or None) from the database."""
    user_pk = this.user_id
    if user_pk is None:
        # no authenticated user on this connection
        return None
    return self.user_model.objects.get(pk=user_pk)
def update_subs(new_user_id):
    """Update subs to send added/removed for collections with user_rel.

    On login/logout the set of objects visible through each subscription
    on this connection can change: diff each sub's querysets before and
    after switching its user_id, emitting ADDED for newly visible objects
    and REMOVED for ones that disappeared.
    """
    for sub in Subscription.objects.filter(connection=this.ws.connection):
        params = loads(sub.params_ejson)
        pub = API.get_pub_by_name(sub.publication)
        # calculate the querysets prior to update
        pre = collections.OrderedDict([
            (col, query) for col, query
            in API.sub_unique_objects(sub, params, pub)
        ])
        # save the subscription with the updated user_id
        sub.user_id = new_user_id
        sub.save()
        # calculate the querysets after the update
        post = collections.OrderedDict([
            (col, query) for col, query
            in API.sub_unique_objects(sub, params, pub)
        ])
        # first pass, send `added` for objs unique to `post`
        for col_post, query in post.items():
            try:
                qs_pre = pre[col_post]
                # drop objects that were already visible pre-change
                query = query.exclude(
                    pk__in=qs_pre.order_by().values('pk'),
                )
            except KeyError:
                # collection not included pre-auth, everything is added.
                pass
            for obj in query:
                this.ws.send(col_post.obj_change_as_msg(obj, ADDED))
        # second pass, send `removed` for objs unique to `pre`
        for col_pre, query in pre.items():
            try:
                qs_post = post[col_pre]
                # drop objects that remain visible post-change
                query = query.exclude(
                    pk__in=qs_post.order_by().values('pk'),
                )
            except KeyError:
                # collection not included post-auth, everything is removed.
                pass
            for obj in query:
                this.ws.send(col_pre.obj_change_as_msg(obj, REMOVED))
def auth_failed(**credentials):
    """Raise a uniform failure so attackers learn nothing useful."""
    if credentials:
        # notify listeners with passwords scrubbed from the credentials
        cleaned = auth._clean_credentials(credentials)
        user_login_failed.send_robust(sender=__name__, credentials=cleaned)
    raise MeteorError(403, 'Authentication failed.')
def validated_user(cls, token, purpose, minutes_valid):
    """Resolve and validate auth token, returns user object."""
    try:
        username, auth_hash = loads(token.decode('base64'))
    except (ValueError, Error):
        # malformed token: fail without detail
        cls.auth_failed(token=token)
    lookup = {
        cls.user_model.USERNAME_FIELD: username,
        'is_active': True,
    }
    try:
        user = cls.user_model.objects.get(**lookup)
        user.backend = 'django.contrib.auth.backends.ModelBackend'
    except cls.user_model.DoesNotExist:
        cls.auth_failed(username=username, token=token)
    if auth_hash not in iter_auth_hashes(user, purpose, minutes_valid):
        # hash expired or forged: fail without detail
        cls.auth_failed(username=username, token=token)
    return user
def check_secure():
    """Return True when using SSL or a local connection, else raise 403."""
    request = this.request
    if request.is_secure():
        return True  # using SSL
    if request.META['REMOTE_ADDR'] in ('localhost', '127.0.0.1'):
        return True  # local connection, SSL not required
    raise MeteorError(403, 'Authentication refused without SSL.')
def get_username(self, user):
    """Retrieve username from user selector.

    `user` may be a plain username string, or a single-entry dict keyed
    by username, email ('email'/'emails.address') or primary key
    ('id'/'pk').  Raises MeteorError(400) for any other shape.
    """
    if isinstance(user, basestring):
        # plain string: already the username
        return user
    elif isinstance(user, dict) and len(user) == 1:
        [(key, val)] = user.items()
        if key == 'username' or (key == self.user_model.USERNAME_FIELD):
            # username provided directly
            return val
        elif key in ('email', 'emails.address'):
            email_field = getattr(self.user_model, 'EMAIL_FIELD', 'email')
            if self.user_model.USERNAME_FIELD == email_field:
                return val  # email is username
            # find username by email
            return self.user_model.objects.values_list(
                self.user_model.USERNAME_FIELD, flat=True,
            ).get(**{email_field: val})
        elif key in ('id', 'pk'):
            # find username by primary key (ID)
            return self.user_model.objects.values_list(
                self.user_model.USERNAME_FIELD, flat=True,
            ).get(
                pk=val,
            )
        else:
            raise MeteorError(400, 'Invalid user lookup: %r' % key)
    else:
        raise MeteorError(400, 'Invalid user expression: %r' % user)
def create_user(self, params):
    """Register a new user account.

    Delegates creation to receivers of the module-level `create_user`
    signal; the first receiver's return value is taken as the new user.
    Raises NotImplementedError when no receiver is registered.
    """
    # NOTE: `create_user` here resolves to the module-level signal, not
    # this method (the method name is not in scope inside its own body).
    receivers = create_user.send(
        sender=__name__,
        request=this.request,
        params=params,
    )
    if len(receivers) == 0:
        raise NotImplementedError(
            'Handler for `create_user` not registered.'
        )
    user = receivers[0][1]
    # authenticate with the supplied password so the auth backend gets
    # attached to the user before login
    user = auth.authenticate(
        username=user.get_username(), password=params['password'],
    )
    self.do_login(user)
    return get_user_token(
        user=user, purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
def do_login(self, user):
    """Login a user.

    Binds the user to the current connection state (`this`), starts a
    silent LoggedInUser subscription, and re-evaluates existing subs so
    added/removed messages reflect the new visibility.
    """
    this.user_id = user.pk
    this.user_ddp_id = get_meteor_id(user)
    # silent subscription (sans sub/nosub msg) to LoggedInUser pub
    this.user_sub_id = meteor_random_id()
    API.do_sub(this.user_sub_id, 'LoggedInUser', silent=True)
    self.update_subs(user.pk)
    user_logged_in.send(
        sender=user.__class__, request=this.request, user=user,
    )
def do_logout(self):
    """Logout a user.

    Reverses do_login: drops the silent LoggedInUser subscription,
    re-evaluates subs with no user, then clears connection state.
    """
    # silent unsubscription (sans sub/nosub msg) from LoggedInUser pub
    API.do_unsub(this.user_sub_id, silent=True)
    del this.user_sub_id
    self.update_subs(None)
    # fired before user_id is cleared (this.user presumably resolves via
    # user_id -- confirm against the `user_factory` implementation)
    user_logged_out.send(
        sender=self.user_model, request=this.request, user=this.user,
    )
    this.user_id = None
    this.user_ddp_id = None
def login(self, params):
    """Login either with resume token or password."""
    if 'password' in params:
        return self.login_with_password(params)
    if 'resume' in params:
        return self.login_with_resume_token(params)
    # neither credential style supplied: uniform failure
    self.auth_failed(**params)
def login_with_password(self, params):
    """Authenticate using credentials supplied in params.

    Expects `params['user']` (a selector understood by get_username) and
    `params['password']`.  Returns resume-token info on success; raises
    through auth_failed otherwise.
    """
    # never allow insecure login
    self.check_secure()
    username = self.get_username(params['user'])
    password = self.get_password(params['password'])
    user = auth.authenticate(username=username, password=password)
    if user is not None:
        # the password verified for the user
        if user.is_active:
            self.do_login(user)
            return get_user_token(
                user=user, purpose=HashPurpose.RESUME_LOGIN,
                minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
            )
    # Call to `authenticate` couldn't verify the username and password.
    # It will have sent the `user_login_failed` signal, no need to pass the
    # `username` argument to auth_failed().
    self.auth_failed()
def login_with_resume_token(self, params):
    """
    Login with existing resume token.

    Either the token is valid and the user is logged in, or the token is
    invalid and a non-specific ValueError("Login failed.") exception is
    raised - don't be tempted to give clues to attackers as to why their
    logins are invalid!
    """
    # never allow insecure login
    self.check_secure()
    # pull the username and auth_hash from the token
    user = self.validated_user(
        params['resume'], purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
    self.do_login(user)
    # issue a fresh resume token so the session can be extended again
    return get_user_token(
        user=user, purpose=HashPurpose.RESUME_LOGIN,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.RESUME_LOGIN],
    )
def change_password(self, old_password, new_password):
    """Change password for the current user.

    The old password is verified via `auth.authenticate` before the new
    one is set; all failures raise through auth_failed without detail.
    """
    try:
        user = this.user
    except self.user_model.DoesNotExist:
        self.auth_failed()
    # verify the old password before accepting the change
    user = auth.authenticate(
        username=user.get_username(),
        password=self.get_password(old_password),
    )
    if user is None:
        self.auth_failed()
    else:
        user.set_password(self.get_password(new_password))
        user.save()
        password_changed.send(
            sender=__name__,
            request=this.request,
            user=user,
        )
    return {"passwordChanged": True}
def forgot_password(self, params):
    """Request password reset email.

    Issues a short-lived PASSWORD_RESET token and notifies receivers of
    the module-level `forgot_password` signal, which handle delivery.
    """
    username = self.get_username(params)
    try:
        user = self.user_model.objects.get(**{
            self.user_model.USERNAME_FIELD: username,
        })
    except self.user_model.DoesNotExist:
        # unknown user: uniform failure, no detail leaked
        self.auth_failed()
    minutes_valid = HASH_MINUTES_VALID[HashPurpose.PASSWORD_RESET]
    token = get_user_token(
        user=user, purpose=HashPurpose.PASSWORD_RESET,
        minutes_valid=minutes_valid,
    )
    # NOTE: `forgot_password` here resolves to the module-level signal,
    # not this method.
    forgot_password.send(
        sender=__name__,
        user=user,
        token=token,
        request=this.request,
        expiry_date=calc_expiry_time(minutes_valid),
    )
def reset_password(self, token, new_password):
    """Reset password using a token received in email then logs user in."""
    user = self.validated_user(
        token, purpose=HashPurpose.PASSWORD_RESET,
        minutes_valid=HASH_MINUTES_VALID[HashPurpose.PASSWORD_RESET],
    )
    user.set_password(new_password)
    user.save()
    # a successful reset also authenticates this connection
    self.do_login(user)
    return {"userId": this.user_ddp_id}
def dict_merge(lft, rgt):
    """
    Recursive dict merge.

    Recursively merges dict's. not just simple lft['key'] = rgt['key'], if
    both lft and rgt have a key who's value is a dict then dict_merge is
    called on both values and the result stored in the returned dictionary.
    Neither input is modified; a non-dict `rgt` simply replaces `lft`.
    """
    if not isinstance(rgt, dict):
        return rgt
    result = deepcopy(lft)
    # .items() (not the Python-2-only .iteritems()) keeps this block
    # working on both Python 2 and 3 with identical behaviour.
    for key, val in rgt.items():
        if key in result and isinstance(result[key], dict):
            result[key] = dict_merge(result[key], val)
        else:
            result[key] = deepcopy(val)
    return result
def read(path, default=None, encoding='utf8'):
    """Read decoded text from `path`, falling back to `default`.

    A falsy path returns `default` immediately.  An IOError while opening
    or reading is suppressed when a non-None default is available;
    otherwise the error propagates.
    """
    if not path:
        return default
    try:
        with io.open(path, mode='r', encoding=encoding) as stream:
            return stream.read()
    except IOError:
        if default is None:
            raise
        return default
def get(self, request, path):
    """Return HTML (or other related content) for Meteor.

    Special-cases `meteor_runtime_config.js` (generated from settings and
    the request); other paths are served from `self.url_map`, with
    `self.html` as the fallback for unmapped paths.
    """
    if path == 'meteor_runtime_config.js':
        config = {
            'DDP_DEFAULT_CONNECTION_URL': request.build_absolute_uri('/'),
            'PUBLIC_SETTINGS': self.meteor_settings.get('public', {}),
            'ROOT_URL': request.build_absolute_uri(
                '%s/' % (
                    self.runtime_config.get('ROOT_URL_PATH_PREFIX', ''),
                ),
            ),
            'ROOT_URL_PATH_PREFIX': '',
        }
        # Use HTTPS instead of HTTP if SECURE_SSL_REDIRECT is set
        if config['DDP_DEFAULT_CONNECTION_URL'].startswith('http:') \
                and settings.SECURE_SSL_REDIRECT:
            config['DDP_DEFAULT_CONNECTION_URL'] = 'https:%s' % (
                config['DDP_DEFAULT_CONNECTION_URL'].split(':', 1)[1],
            )
        # explicit runtime_config entries win over the computed defaults
        config.update(self.runtime_config)
        return HttpResponse(
            '__meteor_runtime_config__ = %s;' % dumps(config),
            content_type='text/javascript',
        )
    try:
        file_path, content_type = self.url_map[path]
        with open(file_path, 'r') as content:
            return HttpResponse(
                content.read(),
                content_type=content_type,
            )
    except KeyError:
        # unknown path: serve the app HTML shell
        return HttpResponse(self.html)
def get_meteor_id(obj_or_model, obj_pk=None):
    """Return an Alea ID for the given object.

    Accepts either a model instance, or a (model, pk) pair.  Resolution
    order: AleaIdField primary key, then a unique AleaIdField on the
    model, then the ObjectMapping table (creating a mapping row with a
    fresh random Alea ID on first use).
    """
    if obj_or_model is None:
        return None
    # Django model._meta is now public API -> pylint: disable=W0212
    meta = obj_or_model._meta
    model = meta.model
    if model is ObjectMapping:
        # this doesn't make sense - raise TypeError
        raise TypeError("Can't map ObjectMapping instances through self.")
    # try getting value of AleaIdField straight from instance if possible
    if isinstance(obj_or_model, model):
        # obj_or_model is an instance, not a model.
        if isinstance(meta.pk, AleaIdField):
            return obj_or_model.pk
        if obj_pk is None:
            # fall back to primary key, but coerce as string type for lookup.
            obj_pk = str(obj_or_model.pk)
    alea_unique_fields = [
        field
        for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique
    ]
    if len(alea_unique_fields) == 1:
        # found an AleaIdField with unique=True, assume it's got the value.
        aid = alea_unique_fields[0].attname
        if isinstance(obj_or_model, model):
            val = getattr(obj_or_model, aid)
        elif obj_pk is None:
            val = None
        else:
            val = model.objects.values_list(aid, flat=True).get(
                pk=obj_pk,
            )
        if val:
            return val
    if obj_pk is None:
        # bail out if args are (model, pk) but pk is None.
        return None
    # fallback to using AleaIdField from ObjectMapping model.
    content_type = ContentType.objects.get_for_model(model)
    try:
        return ObjectMapping.objects.values_list(
            'meteor_id', flat=True,
        ).get(
            content_type=content_type,
            object_id=obj_pk,
        )
    except ObjectDoesNotExist:
        # no mapping yet: create one with a fresh random Alea ID
        return ObjectMapping.objects.create(
            content_type=content_type,
            object_id=obj_pk,
            meteor_id=meteor_random_id('/collection/%s' % meta),
        ).meteor_id
def get_meteor_ids(model, object_ids):
    """Return Alea ID mapping for all given ids of specified model.

    Returns an OrderedDict of str(pk) -> meteor_id preserving the order
    of `object_ids`; any ids still unmapped after the bulk query are
    resolved (and created) one-by-one via get_meteor_id.
    """
    # Django model._meta is now public API -> pylint: disable=W0212
    meta = model._meta
    result = collections.OrderedDict(
        (str(obj_pk), None)
        for obj_pk
        in object_ids
    )
    if isinstance(meta.pk, AleaIdField):
        # primary_key is an AleaIdField, use it.
        return collections.OrderedDict(
            (obj_pk, obj_pk) for obj_pk in object_ids
        )
    alea_unique_fields = [
        field
        for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique and not field.null
    ]
    if len(alea_unique_fields) == 1:
        # model carries its own unique Alea ID field: bulk fetch from it
        aid = alea_unique_fields[0].name
        query = model.objects.filter(
            pk__in=object_ids,
        ).values_list('pk', aid)
    else:
        # otherwise bulk fetch from the ObjectMapping table
        content_type = ContentType.objects.get_for_model(model)
        query = ObjectMapping.objects.filter(
            content_type=content_type,
            object_id__in=list(result)
        ).values_list('object_id', 'meteor_id')
    for obj_pk, meteor_id in query:
        result[str(obj_pk)] = meteor_id
    for obj_pk, meteor_id in result.items():
        if meteor_id is None:
            # not found above: create/fetch individually
            result[obj_pk] = get_meteor_id(model, obj_pk)
    return result
def get_object_id(model, meteor_id):
    """Return an object ID for the given meteor_id.

    Mirrors get_meteor_id in reverse: AleaIdField primary key, then a
    unique AleaIdField on the model, then the ObjectMapping table (the
    final .get() raises DoesNotExist when no mapping exists).
    """
    if meteor_id is None:
        return None
    # Django model._meta is now public API -> pylint: disable=W0212
    meta = model._meta
    if model is ObjectMapping:
        # this doesn't make sense - raise TypeError
        raise TypeError("Can't map ObjectMapping instances through self.")
    if isinstance(meta.pk, AleaIdField):
        # meteor_id is the primary key
        return meteor_id
    alea_unique_fields = [
        field
        for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique
    ]
    if len(alea_unique_fields) == 1:
        # found an AleaIdField with unique=True, assume it's got the value.
        val = model.objects.values_list(
            'pk', flat=True,
        ).get(**{
            alea_unique_fields[0].attname: meteor_id,
        })
        if val:
            return val
    # fall back to the mapping table
    content_type = ContentType.objects.get_for_model(model)
    return ObjectMapping.objects.filter(
        content_type=content_type,
        meteor_id=meteor_id,
    ).values_list('object_id', flat=True).get()
def get_object_ids(model, meteor_ids):
    """Return all object IDs for the given meteor_ids.

    Returns an OrderedDict of meteor_id -> object_id (None where no
    mapping was found), preserving the order of `meteor_ids`.
    """
    if model is ObjectMapping:
        # this doesn't make sense - raise TypeError
        raise TypeError("Can't map ObjectMapping instances through self.")
    # Django model._meta is now public API -> pylint: disable=W0212
    meta = model._meta
    alea_unique_fields = [
        field
        for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique and not field.null
    ]
    result = collections.OrderedDict(
        (str(meteor_id), None)
        for meteor_id
        in meteor_ids
    )
    if len(alea_unique_fields) == 1:
        # model carries its own unique Alea ID field: query it directly
        aid = alea_unique_fields[0].name
        query = model.objects.filter(**{
            '%s__in' % aid: meteor_ids,
        }).values_list(aid, 'pk')
    else:
        # otherwise consult the ObjectMapping table
        content_type = ContentType.objects.get_for_model(model)
        query = ObjectMapping.objects.filter(
            content_type=content_type,
            meteor_id__in=meteor_ids,
        ).values_list('meteor_id', 'object_id')
    for meteor_id, object_id in query:
        result[meteor_id] = object_id
    return result
def get_object(model, meteor_id, *args, **kwargs):
    """Return an object for the given meteor_id.

    Extra args/kwargs are applied as queryset filters before the lookup.
    """
    # Django model._meta is now public API -> pylint: disable=W0212
    meta = model._meta
    queryset = model.objects.filter(*args, **kwargs)
    if isinstance(meta.pk, AleaIdField):
        # the meteor_id IS the primary key
        return queryset.get(pk=meteor_id)
    candidates = [
        field for field in meta.local_fields
        if isinstance(field, AleaIdField) and field.unique and not field.null
    ]
    if len(candidates) == 1:
        # model carries its own unique Alea ID field: look it up directly
        return queryset.get(**{candidates[0].name: meteor_id})
    # otherwise translate via the mapping table first
    return queryset.get(pk=get_object_id(model, meteor_id))
def get_pk_value_on_save(self, instance):
    """Generate ID if required."""
    value = super(AleaIdField, self).get_pk_value_on_save(instance)
    # fall back to a seeded value when the parent produced nothing
    return value or self.get_seeded_value(instance)
def pre_save(self, model_instance, add):
    """Generate ID if required."""
    value = super(AleaIdField, self).pre_save(model_instance, add)
    needs_value = (
        not value
    ) and self.default in (meteor_random_id, NOT_PROVIDED)
    if needs_value:
        # generate a seeded value and store it back on the instance
        value = self.get_seeded_value(model_instance)
        setattr(model_instance, self.attname, value)
    return value
def set_default_forwards(app_name, operation, apps, schema_editor):
    """Set default value for AleaIdField."""
    model = apps.get_model(app_name, operation.model_name)
    for pk in model.objects.values_list('pk', flat=True):
        # write each row's mapped Alea ID into the new field
        meteor_id = get_meteor_id(model, pk)
        model.objects.filter(pk=pk).update(**{operation.name: meteor_id})
def set_default_reverse(app_name, operation, apps, schema_editor):
    """Unset default value for AleaIdField."""
    model = apps.get_model(app_name, operation.model_name)
    # touching get_meteor_id ensures a mapping row exists for every object
    # before the concrete field disappears
    for pk in model.objects.values_list('pk', flat=True):
        get_meteor_id(model, pk)
def truncate(self, app_label, schema_editor, models):
    """Truncate tables.

    Table names follow Django's `<app_label>_<model_name>` convention,
    lower-cased.
    """
    for model_name in models:
        table = ('%s_%s' % (app_label, model_name)).lower()
        schema_editor.execute(
            'TRUNCATE TABLE %s RESTART IDENTITY CASCADE' % table,
        )
def database_forwards(self, app_label, schema_editor, from_state, to_state):
    """Use schema_editor to apply any forward changes."""
    # forward migration: truncate the configured 'forwards' tables
    self.truncate(app_label, schema_editor, self.truncate_forwards)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
    """Use schema_editor to apply any reverse changes."""
    # reverse migration: truncate the configured 'backwards' tables
    self.truncate(app_label, schema_editor, self.truncate_backwards)
def initialize_options(self):
    """Set command option defaults."""
    setuptools.command.build_py.build_py.initialize_options(self)
    self.meteor = 'meteor'       # name/path of the meteor executable
    self.meteor_debug = False    # pass --debug to `meteor build`
    self.build_lib = None        # filled in from 'build' in finalize_options
    self.package_dir = None      # filled in from 'build_py' in finalize_options
    self.meteor_builds = []      # (package, source, target, extra_args) tuples
    self.no_prune_npm = None     # keep bundle/programs/server/npm when true
    self.inplace = True          # build into SETUP_DIR instead of build_lib
def finalize_options(self):
    """Update command options.

    Inherits `build_lib` from the 'build' command and `package_dir` from
    'build_py' when not set explicitly, then delegates to the base class.
    """
    # Get all the information we need to install pure Python modules
    # from the umbrella 'install' command -- build (source) directory,
    # install (target) directory, and whether to compile .py files.
    self.set_undefined_options(
        'build',
        ('build_lib', 'build_lib'),
    )
    self.set_undefined_options(
        'build_py',
        ('package_dir', 'package_dir'),
    )
    setuptools.command.build_py.build_py.finalize_options(self)
def run(self):
    """Perform build.

    For each (package, source, target, extra_args) entry in
    `meteor_builds`, run `meteor build --directory` into the computed
    output directory, then optionally prune the npm build tree.
    """
    for (package, source, target, extra_args) in self.meteor_builds:
        src_dir = self.get_package_dir(package)
        # convert UNIX-style paths to directory names
        project_dir = self.path_to_dir(src_dir, source)
        target_dir = self.path_to_dir(src_dir, target)
        output_dir = self.path_to_dir(
            os.path.abspath(SETUP_DIR if self.inplace else self.build_lib),
            target_dir,
        )
        # construct command line.
        cmdline = [self.meteor, 'build', '--directory', output_dir]
        # a leading '--no-prune-npm' in extra_args forces the option on
        # for this build only, and is consumed rather than passed through
        no_prune_npm = self.no_prune_npm
        if extra_args[:1] == ['--no-prune-npm']:
            no_prune_npm = True
            extra_args[:1] = []
        if self.meteor_debug and '--debug' not in cmdline:
            cmdline.append('--debug')
        cmdline.extend(extra_args)
        # execute command
        log.info(
            'building meteor app %r (%s)', project_dir, ' '.join(cmdline),
        )
        subprocess.check_call(cmdline, cwd=project_dir)
        if not no_prune_npm:
            # django-ddp doesn't use bundle/programs/server/npm cruft
            npm_build_dir = os.path.join(
                output_dir, 'bundle', 'programs', 'server', 'npm',
            )
            log.info('pruning meteor npm build %r', npm_build_dir)
            shutil.rmtree(npm_build_dir)
def path_to_dir(*path_args):
    """Convert a UNIX-style path into platform specific directory spec.

    All leading arguments are used verbatim; only the final argument is
    split on '/' before everything is joined with os.path.join.
    """
    head = list(path_args[:-1])
    tail = path_args[-1].split(posixpath.sep)
    return os.path.join(*(head + tail))
def seed(self, values):
    """Seed internal state from supplied values.

    State is counter `c` plus three fractional components s0/s1/s2, each
    derived by folding every seed value through the Mash hash in turn.
    """
    if not values:
        # Meteor uses epoch seconds as the seed if no args supplied, we use
        # a much more secure seed by default to avoid hash collisions.
        # (object ids in shuffled order, plus time and 512 urandom bytes)
        seed_ids = [int, str, random, self, values, self.__class__]
        random.shuffle(seed_ids)
        values = list(map(id, seed_ids)) + [time.time(), os.urandom(512)]
    mash = Mash()
    self.c = 1
    self.s0 = mash(' ')
    self.s1 = mash(' ')
    self.s2 = mash(' ')
    # NOTE(review): mash appears to be stateful (Alea's mash function),
    # so the exact call order below is significant -- confirm before
    # restructuring this loop.
    for val in values:
        self.s0 -= mash(val)
        if self.s0 < 0:
            self.s0 += 1
        self.s1 -= mash(val)
        if self.s1 < 0:
            self.s1 += 1
        self.s2 -= mash(val)
        if self.s2 < 0:
            self.s2 += 1
def state(self):
    """Return internal state, useful for testing."""
    snapshot = {}
    for attr in ('c', 's0', 's1', 's2'):
        snapshot[attr] = getattr(self, attr)
    return snapshot
def random_string(self, length, alphabet):
    """Return string of `length` elements chosen from `alphabet`."""
    picks = [self.choice(alphabet) for _ in range(length)]
    return ''.join(picks)
def api_endpoint(path_or_func=None, decorate=True):
    """Mark a callable as an API endpoint for later registration.

    Usable both directly (``@api_endpoint``), where the API path defaults
    to the function name, and with an explicit path
    (``@api_endpoint('Decrement')``).

    Args:
        path_or_func: either the function to be decorated or its API path.
        decorate (bool): Apply API_ENDPOINT_DECORATORS if True (default).

    Returns:
        Callable: Decorated function (with optionally applied decorators).
    """
    def wrap(func, api_path):
        """Attach api_path, then optionally apply the standard decorators."""
        func.api_path = api_path
        if not decorate:
            return func
        for decorator in API_ENDPOINT_DECORATORS:
            func = decorator()(func)
        return func

    if callable(path_or_func):
        # bare @api_endpoint usage: the path defaults to the func name
        return wrap(path_or_func, path_or_func.__name__)

    def _api_endpoint(func):
        """Deferred decorator produced by @api_endpoint(path)."""
        if path_or_func is None:
            return wrap(func, func.__name__)
        return wrap(func, path_or_func)
    return _api_endpoint
def api_endpoints(obj):
    """Yield (api_path, callback) pairs for obj and its API providers."""
    for attr_name in dir(obj):
        endpoint = getattr(obj, attr_name)
        api_path = getattr(endpoint, 'api_path', None)
        if api_path:
            # prefix distinguishes endpoints from different providers
            yield ('%s%s' % (obj.api_path_prefix, api_path), endpoint)
    # recurse into nested providers
    for provider in obj.api_providers:
        for pair in api_endpoints(provider):
            yield pair
def api_path_map(self):
    """Cached dict of api_path: func."""
    if self._api_path_cache is None:
        # build the lookup table lazily on first access
        self._api_path_cache = dict(api_endpoints(self))
    return self._api_path_cache
def clear_api_path_map_cache(self):
    """Clear out cache for api_path_map."""
    self._api_path_cache = None
    for provider in self.api_providers:
        # only recurse into bound methods (skip unbound/class-level ones)
        bound_to = six.get_method_self(provider.clear_api_path_map_cache)
        if bound_to is not None:
            provider.clear_api_path_map_cache()
def safe_call(func, *args, **kwargs):
    """Invoke `func(*args, **kwargs)` without ever letting an exception escape.

    Returns a ``(traceback_text, result)`` pair: ``(None, result)`` on
    success, ``(formatted_traceback, None)`` on failure.  Useful inside
    exception handlers where calls such as `logging.error` try to send
    email but the SMTP server isn't always available, and the handler
    itself must not blow up.
    """
    try:
        result = func(*args, **kwargs)
    except Exception:  # pylint: disable=broad-except
        # hand the stack trace back to the caller instead of raising
        return traceback.format_exc(), None
    return None, result
def dprint(name, val):
    """Debug print `name` and a pretty-formatted `val`."""
    from pprint import pformat
    formatted = pformat(val, indent=4, width=75)
    # indent continuation lines so they align under the value column
    body = '\n       '.join(formatted.split('\n'))
    print('% 5s: %s' % (name, body))
def validate_kwargs(func, kwargs):
    """
    Validate arguments to be supplied to func.

    Mutates `kwargs` in place, renaming keys such as `id` to `id_` where
    the target function appends a trailing underscore to dodge reserved
    names.  Raises MeteorError(400, func.err, ...) if required arguments
    are missing or unknown arguments are supplied; returns None on
    success.
    """
    func_name = func.__name__
    # inspect.getargspec was removed in Python 3.11; getfullargspec is a
    # drop-in replacement for the `args`/`defaults` attributes used here.
    try:
        argspec = inspect.getfullargspec(func)
    except AttributeError:  # Python 2: only getargspec exists
        argspec = inspect.getargspec(func)
    all_args = argspec.args[:]
    defaults = list(argspec.defaults or [])
    # ignore implicit 'self' argument
    if inspect.ismethod(func) and all_args[:1] == ['self']:
        all_args[:1] = []
    # don't require arguments that have defaults
    if defaults:
        required = all_args[:-len(defaults)]
    else:
        required = all_args[:]
    # translate 'foo_' to avoid reserved names like 'id'
    trans = {
        arg: arg.endswith('_') and arg[:-1] or arg
        for arg
        in all_args
    }
    # rename supplied keys (e.g. 'id' -> 'id_') to match the signature
    for key in list(kwargs):
        key_adj = '%s_' % key
        if key_adj in all_args:
            kwargs[key_adj] = kwargs.pop(key)
    # figure out what we're missing (report the client-facing name)
    supplied = sorted(kwargs)
    missing = [
        trans.get(arg, arg) for arg in required
        if arg not in supplied
    ]
    if missing:
        raise MeteorError(
            400,
            func.err,
            'Missing required arguments to %s: %s' % (
                func_name,
                ' '.join(missing),
            ),
        )
    # figure out what is extra
    extra = [
        arg for arg in supplied
        if arg not in all_args
    ]
    if extra:
        raise MeteorError(
            400,
            func.err,
            'Unknown arguments to %s: %s' % (func_name, ' '.join(extra)),
        )
def on_open(self):
    """Handle new websocket connection."""
    # expose per-connection state via the `this` proxy so application
    # code elsewhere can reach the active request/websocket/reply funcs
    this.request = WSGIRequest(self.ws.environ)
    this.ws = self
    this.send = self.send
    this.reply = self.reply
    self.logger = self.ws.logger
    # per-collection sets of meteor IDs already sent to this client;
    # used by send() to translate added/changed/removed messages
    self.remote_ids = collections.defaultdict(set)
    # `_tx_buffer` collects outgoing messages which must be sent in order
    self._tx_buffer = {}
    # track the head of the queue (buffer) and the next msg to be sent
    self._tx_buffer_id_gen = itertools.cycle(irange(sys.maxint))
    self._tx_next_id_gen = itertools.cycle(irange(sys.maxint))
    # start by waiting for the very first message
    self._tx_next_id = next(self._tx_next_id_gen)
    # "host:port" of the remote peer, kept on both `this` and `self`
    this.remote_addr = self.remote_addr = \
        '{0[REMOTE_ADDR]}:{0[REMOTE_PORT]}'.format(
            self.ws.environ,
        )
    this.subs = {}
    safe_call(self.logger.info, '+ %s OPEN', self)
    # SockJS handshake: 'o' opens the session, followed by the
    # server_id frame expected by DDP clients.
    self.send('o')
    self.send('a["{\\"server_id\\":\\"0\\"}"]')
def on_close(self, *args, **kwargs):
    """Handle closing of websocket connection."""
    conn = self.connection
    if conn is not None:
        # deregister from the postgres worker and drop the DB row
        del self.pgworker.connections[conn.pk]
        conn.delete()
        self.connection = None
    signals.request_finished.send(sender=self.__class__)
    safe_call(self.logger.info, '- %s %s', self, args or 'CLOSE')
def on_message(self, message):
    """Process a message received from remote."""
    if self.ws.closed:
        return None
    try:
        safe_call(self.logger.debug, '< %s %r', self, message)
        # handle each DDP frame contained in the raw websocket message
        for payload in self.ddp_frames_from_message(message):
            self.process_ddp(payload)
        # emit request_finished signal to close DB connections
        signals.request_finished.send(sender=self.__class__)
    except geventwebsocket.WebSocketError:
        # remote endpoint vanished mid-processing: drop the connection
        self.ws.close()
def ddp_frames_from_message(self, message):
    """
    Yield DDP payload dicts parsed from a raw WebSocket message.

    Replies with a DDP `error` message for any malformed input; only
    well-formed (dict) payloads are yielded to the caller.
    """
    # parse message set
    try:
        msgs = ejson.loads(message)
    except ValueError:
        self.reply(
            'error', error=400, reason='Data is not valid EJSON',
        )
        # plain `return` ends a generator; `raise StopIteration` here
        # becomes RuntimeError on Python 3.7+ (PEP 479).
        return
    if not isinstance(msgs, list):
        self.reply(
            'error', error=400, reason='Invalid EJSON messages',
        )
        return
    # process individual messages
    while msgs:
        # pop raw message from the list
        raw = msgs.pop(0)
        # parse message payload
        try:
            data = ejson.loads(raw)
        except (TypeError, ValueError):
            data = None
        if not isinstance(data, dict):
            self.reply(
                'error', error=400,
                reason='Invalid SockJS DDP payload',
                offendingMessage=raw,
            )
            # don't yield the malformed frame -- process_ddp expects a
            # dict and would crash on e.g. None.
            continue
        yield data
        if msgs:
            # yield to other greenlets before processing next msg
            gevent.sleep()
def process_ddp(self, data):
    """
    Process a single DDP message.

    Dispatches to the appropriate handler and converts any exception
    into a protocol-level reply: method calls report errors inside a
    `result` message, everything else gets a top-level `error` message.
    """
    msg_id = data.get('id', None)
    try:
        # pop the message type so the remaining `data` is pure kwargs
        msg = data.pop('msg')
    except KeyError:
        self.reply(
            'error', reason='Bad request',
            offendingMessage=data,
        )
        return
    try:
        # dispatch message
        self.dispatch(msg, data)
    except Exception as err:  # pylint: disable=broad-except
        # This should be the only protocol exception handler
        kwargs = {
            # 'method' errors travel inside a `result` message; any
            # other failing message type produces an `error` message
            'msg': {'method': 'result'}.get(msg, 'error'),
        }
        if msg_id is not None:
            kwargs['id'] = msg_id
        if isinstance(err, MeteorError):
            # client error carries its own structured payload
            error = err.as_dict()
        else:
            # don't leak internals for unexpected server-side failures
            error = {
                'error': 500,
                'reason': 'Internal server error',
            }
        if kwargs['msg'] == 'error':
            # error fields are inlined into the top-level error message
            kwargs.update(error)
        else:
            # ...but nested under 'error' for method results
            kwargs['error'] = error
        if not isinstance(err, MeteorError):
            # not a client error, should always be logged.
            stack, _ = safe_call(
                self.logger.error, '%r %r', msg, data, exc_info=1,
            )
            if stack is not None:
                # something went wrong while logging the error, revert to
                # writing a stack trace to stderr.
                traceback.print_exc(file=sys.stderr)
                sys.stderr.write(
                    'Additionally, while handling the above error the '
                    'following error was encountered:\n'
                )
                sys.stderr.write(stack)
        elif settings.DEBUG:
            print('ERROR: %s' % err)
            dprint('msg', msg)
            dprint('data', data)
            error.setdefault('details', traceback.format_exc())
            # print stack trace for client errors when DEBUG is True.
            print(error['details'])
        self.reply(**kwargs)
    if msg_id and msg == 'method':
        # methods always conclude with an `updated` confirmation
        self.reply('updated', methods=[msg_id])
def dispatch(self, msg, kwargs):
    """Dispatch msg to appropriate recv_foo handler."""
    # enforce calling 'connect' first
    if self.connection is None and msg != 'connect':
        self.reply('error', reason='Must connect first')
        return
    # method invocations must carry both 'method' and 'id'
    if msg == 'method' and not ('method' in kwargs and 'id' in kwargs):
        self.reply(
            'error', error=400, reason='Malformed method invocation',
        )
        return
    # lookup method handler
    try:
        handler = getattr(self, 'recv_%s' % msg)
    except (AttributeError, UnicodeEncodeError):
        raise MeteorError(404, 'Method not found')
    # validate handler arguments
    validate_kwargs(handler, kwargs)
    # dispatch to handler
    handler(**kwargs)
def send(self, data, tx_id=None):
    """
    Send `data` (raw string or EJSON payload) to WebSocket client.

    Messages can arrive from concurrent greenlets out of sequence, so
    each one is buffered under a monotonically increasing `tx_id` and
    flushed strictly in order (`_tx_next_id` is the next ID due).
    """
    # buffer data until we get pre-requisite data
    if tx_id is None:
        tx_id = self.get_tx_id()
    self._tx_buffer[tx_id] = data
    # de-queue messages from buffer
    while self._tx_next_id in self._tx_buffer:
        # pull next message from buffer
        data = self._tx_buffer.pop(self._tx_next_id)
        if self._tx_buffer:
            safe_call(self.logger.debug, 'TX found %d', self._tx_next_id)
        # advance next message ID
        self._tx_next_id = next(self._tx_next_id_gen)
        if not isinstance(data, basestring):
            # ejson payload
            msg = data.get('msg', None)
            if msg in (ADDED, CHANGED, REMOVED):
                # reconcile against what this client already has
                # (per-collection ID sets in self.remote_ids)
                ids = self.remote_ids[data['collection']]
                meteor_id = data['id']
                if msg == ADDED:
                    if meteor_id in ids:
                        # client already has it: downgrade to `changed`
                        msg = data['msg'] = CHANGED
                    else:
                        ids.add(meteor_id)
                elif msg == CHANGED:
                    if meteor_id not in ids:
                        # object has become visible, treat as `added`.
                        msg = data['msg'] = ADDED
                        ids.add(meteor_id)
                elif msg == REMOVED:
                    try:
                        ids.remove(meteor_id)
                    except KeyError:
                        continue  # client doesn't have this, don't send.
            # frame the payload as a SockJS array message
            data = 'a%s' % ejson.dumps([ejson.dumps(data)])
        # send message
        safe_call(self.logger.debug, '> %s %r', self, data)
        try:
            self.ws.send(data)
        except geventwebsocket.WebSocketError:
            # connection is gone: drop everything still queued
            self.ws.close()
            self._tx_buffer.clear()
            break
    # warn when the out-of-order backlog grows suspiciously large
    num_waiting = len(self._tx_buffer)
    if num_waiting > 10:
        safe_call(
            self.logger.warn,
            'TX received %d, waiting for %d, have %d waiting: %r.',
            tx_id, self._tx_next_id, num_waiting, self._tx_buffer,
        )
def recv_connect(self, version=None, support=None, session=None):
    """
    DDP connect handler.

    Negotiates the protocol version with the client, records this
    websocket as a Connection row in the database and replies
    `connected` -- or `failed`/raises MeteorError on mismatch.
    """
    del session  # Meteor doesn't even use this!
    if self.connection is not None:
        # a DDP session is already active on this websocket
        raise MeteorError(
            400, 'Session already established.',
            self.connection.connection_id,
        )
    elif None in (version, support) or version not in self.versions:
        # offer our preferred version so the client can retry
        self.reply('failed', version=self.versions[0])
    elif version not in support:
        raise MeteorError(400, 'Client version/support mismatch.')
    else:
        from dddp.models import Connection
        # the postgres backend PID makes server_addr unique per session
        cur = connection.cursor()
        cur.execute('SELECT pg_backend_pid()')
        (backend_pid,) = cur.fetchone()
        this.version = version
        this.support = support
        self.connection = Connection.objects.create(
            server_addr='%d:%s' % (
                backend_pid,
                self.ws.handler.socket.getsockname(),
            ),
            remote_addr=self.remote_addr,
            version=version,
        )
        # register with the postgres worker so change events reach us
        self.pgworker.connections[self.connection.pk] = self
        # make sure the Connection row is cleaned up on shutdown
        atexit.register(self.on_close, 'Shutting down.')
        self.reply('connected', session=self.connection.connection_id)
def recv_ping(self, id_=None):
    """DDP ping handler: answer with `pong`, echoing any ping ID."""
    kwargs = {} if id_ is None else {'id': id_}
    self.reply('pong', **kwargs)
def recv_sub(self, id_, name, params):
    """
    DDP sub handler.

    :param id_: client-chosen subscription ID.
    :param name: name of the publication to subscribe to.
    :param params: positional parameters forwarded to the publication.
    """
    self.api.sub(id_, name, *params)
def recv_unsub(self, id_=None):
    """DDP unsub handler: cancel a subscription by ID."""
    if not id_:
        # nothing to cancel -- acknowledge with a bare `nosub`
        self.reply('nosub')
        return
    self.api.unsub(id_)
def recv_method(self, method, params, id_, randomSeed=None):
    """
    DDP method handler.

    When the client supplies `randomSeed`, seed the per-request random
    streams so server-generated IDs line up with the client's, then
    invoke the API method and confirm completion with `updated`.
    """
    if randomSeed is not None:
        this.random_streams.random_seed = randomSeed
        this.alea_random = alea.Alea(randomSeed)
    self.api.method(method, params, id_)
    self.reply('updated', methods=[id_])
def ddpp_sockjs_info(environ, start_response):
    """Inform client that WebSocket service is available."""
    import random
    import ejson
    # assemble response headers: JSON content type plus shared headers
    headers = [
        ('Content-Type', 'application/json; charset=UTF-8'),
    ]
    headers.extend(common_headers(environ))
    start_response('200 OK', headers)
    # SockJS /info payload; key order is part of the wire format here
    info = collections.OrderedDict()
    info['websocket'] = True
    info['origins'] = ['*:*']
    info['cookie_needed'] = False
    info['entropy'] = random.getrandbits(32)
    yield ejson.dumps(info)
def addr(val, default_port=8000, defualt_host='localhost'):
    """
    Convert a string of format host[:port] into Addr(host, port).

    NOTE(review): the ``defualt_host`` keyword is misspelled upstream;
    the spelling is preserved here for backward compatibility with any
    keyword callers.

    >>> addr('0:80')
    Addr(host='0', port=80)
    >>> addr('127.0.0.1:80')
    Addr(host='127.0.0.1', port=80)
    >>> addr('0.0.0.0', default_port=8000)
    Addr(host='0.0.0.0', port=8000)
    """
    import re
    import socket
    match = re.match(r'\A(?P<host>.*?)(:(?P<port>(\d+|\w+)))?\Z', val)
    if match is None:
        raise argparse.ArgumentTypeError(
            '%r is not a valid host[:port] address.' % val
        )
    # empty host falls back to the default
    host = match.group('host') or defualt_host
    port = match.group('port')
    if not port:
        port = default_port
    elif port.isdigit():
        port = int(port)
    else:
        # named service (e.g. 'http') -> numeric port via /etc/services
        port = socket.getservbyname(port)
    return Addr(host, port)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.