Search is not available for this dataset
text
stringlengths
75
104k
def mutate_list_of_nodes(node, context):
    """Mutate every child of *node*, skipping return annotations.

    :type context: Context
    """
    # Anything between a '->' operator and the following ':' is a return
    # annotation and must never be mutated.
    in_return_annotation = False
    for child in node.children:
        if child.type == 'operator':
            if child.value == '->':
                in_return_annotation = True
            elif child.value == ':' and in_return_annotation:
                in_return_annotation = False
        if in_return_annotation:
            continue
        mutate_node(child, context=context)
        # Optimization: once the requested mutation has been performed there
        # is no point visiting the remaining children.
        if context.number_of_performed_mutations and context.mutation_id != ALL:
            return
def mutate_file(backup, context):
    """Apply mutations to the file named in *context* and rewrite it in place.

    :type backup: bool
    :type context: Context
    :return: number of mutations performed
    """
    with open(context.filename) as source_file:
        original = source_file.read()
    context.source = original
    if backup:
        # Keep a pristine copy next to the file being mutated.
        with open(context.filename + '.bak', 'w') as backup_file:
            backup_file.write(original)
    mutated_source, mutation_count = mutate(context)
    with open(context.filename, 'w') as out_file:
        out_file.write(mutated_source)
    return mutation_count
def connect(self, timeout_sec=TIMEOUT_SEC):
    """Connect to the device.  If not connected within the specified timeout
    then an exception is thrown.
    """
    # Ask CoreBluetooth to open the connection, then block on the event that
    # the central-manager delegate sets once the connection is reported.
    self._central_manager.connectPeripheral_options_(self._peripheral, None)
    connected_in_time = self._connected.wait(timeout_sec)
    if not connected_in_time:
        raise RuntimeError('Failed to connect to device within timeout period!')
def disconnect(self, timeout_sec=TIMEOUT_SEC):
    """Disconnect from the device.  If not disconnected within the specified
    timeout then an exception is thrown.
    """
    # Remove all the services, characteristics, and descriptors from the
    # lists of those items.  Do this before disconnecting because they won't
    # be accessible afterwards.
    for service in self.list_services():
        for char in service.list_characteristics():
            for desc in char.list_descriptors():
                descriptor_list().remove(desc)
            characteristic_list().remove(char)
        service_list().remove(service)
    # Now disconnect.
    self._central_manager.cancelPeripheralConnection_(self._peripheral)
    if not self._disconnected.wait(timeout_sec):
        # Fixed message: previously read 'disconnect to device'.
        raise RuntimeError('Failed to disconnect from device within timeout period!')
def _update_advertised(self, advertised):
    """Called when advertisement data is received."""
    # Advertisement data was received, pull out advertised service UUIDs
    # from the advertisement data.
    if 'kCBAdvDataServiceUUIDs' in advertised:
        # Wrap map() in list(): on Python 3 map returns an iterator, and
        # list + iterator raises TypeError.
        self._advertised = self._advertised + list(
            map(cbuuid_to_uuid, advertised['kCBAdvDataServiceUUIDs']))
def _characteristics_discovered(self, service): """Called when GATT characteristics have been discovered.""" # Characteristics for the specified service were discovered. Update # set of discovered services and signal when all have been discovered. self._discovered_services.add(service) if self._discovered_services >= set(self._peripheral.services()): # Found all the services characteristics, finally time to fire the # service discovery complete event. self._discovered.set()
def _characteristic_changed(self, characteristic):
    """Called when the specified characteristic has changed its value."""
    # Invoke the user's on-changed handler for this characteristic, if any,
    # passing the raw value bytes.
    handler = self._char_on_changed.get(characteristic, None)
    if handler is not None:
        handler(characteristic.value().bytes().tobytes())
    # Also wake up any read_value() call blocked on this characteristic's
    # metadata wrapper.
    known_char = characteristic_list().get(characteristic)
    if known_char is not None:
        known_char._value_read.set()
def _descriptor_changed(self, descriptor):
    """Called when the specified descriptor has changed its value."""
    # Wake up any read blocked on this descriptor's value.
    known_desc = descriptor_list().get(descriptor)
    if known_desc is not None:
        known_desc._value_read.set()
def discover(self, service_uuids, char_uuids, timeout_sec=TIMEOUT_SEC):
    """Wait up to timeout_sec for the specified services and characteristics
    to be discovered on the device.  If the timeout is exceeded without
    discovering the services and characteristics then an exception is thrown.
    """
    # CoreBluetooth fires a single event when full discovery completes, so
    # just wait on it (the requested UUID lists are not needed here).
    if self._discovered.wait(timeout_sec):
        return
    raise RuntimeError('Failed to discover device services within timeout period!')
def rssi(self, timeout_sec=TIMEOUT_SEC):
    """Return the RSSI signal strength in decibels."""
    # Reset the event, start the async RSSI read, then wait for the
    # delegate's _rssi_changed() callback to signal completion.
    self._rssi_read.clear()
    self._peripheral.readRSSI()
    got_value = self._rssi_read.wait(timeout_sec)
    if not got_value:
        raise RuntimeError('Exceeded timeout waiting for RSSI value!')
    return self._rssi
def read(self, timeout_sec=None):
    """Block until data is available to read from the UART.  Will return a
    string of data that has been received.  Timeout_sec specifies how many
    seconds to wait for data to be available and will block forever if None
    (the default).  If the timeout is exceeded and no data is found then
    None is returned.
    """
    received = None
    try:
        received = self._queue.get(timeout=timeout_sec)
    except queue.Empty:
        # Nothing arrived before the timeout; fall through and return None.
        pass
    return received
def list_characteristics(self):
    """Return list of GATT characteristics that have been discovered for this
    service.
    """
    paths = self._props.Get(_SERVICE_INTERFACE, 'Characteristics')
    # list() ensures a real list is returned on Python 3 (map is a lazy
    # iterator there, which breaks callers that index or re-iterate).
    return list(map(BluezGattCharacteristic,
                    get_provider()._get_objects_by_path(paths)))
def start_notify(self, on_change):
    """Enable notification of changes for this characteristic on the
    specified on_change callback.  on_change should be a function that takes
    one parameter which is the value (as a string of bytes) of the changed
    characteristic value.
    """
    def _on_props_changed(iface, changed_props, invalidated_props):
        # Only forward GATT characteristic property changes that actually
        # carry a new value to the user's callback.
        if iface != _CHARACTERISTIC_INTERFACE:
            return
        if 'Value' not in changed_props:
            return
        on_change(''.join(map(chr, changed_props['Value'])))
    # Listen for property changes on this characteristic's DBus object.
    self._props.connect_to_signal('PropertiesChanged', _on_props_changed)
    # Ask bluez to start emitting value-change notifications.
    self._characteristic.StartNotify()
def list_descriptors(self):
    """Return list of GATT descriptors that have been discovered for this
    characteristic.
    """
    paths = self._props.Get(_CHARACTERISTIC_INTERFACE, 'Descriptors')
    # list() ensures a real list is returned on Python 3 (map is a lazy
    # iterator there, which breaks callers that index or re-iterate).
    return list(map(BluezGattDescriptor,
                    get_provider()._get_objects_by_path(paths)))
def _state_changed(self, state):
    """Called when the power state changes."""
    logger.debug('Adapter state change: {0}'.format(state))
    # NOTE(review): 5 and 4 appear to be CoreBluetooth central-manager state
    # codes (presumably powered-on and powered-off respectively) -- confirm
    # against the CBCentralManagerState / CBManagerState constants.
    # Handle when powered on.
    if state == 5:
        self._powered_off.clear()
        self._powered_on.set()
    # Handle when powered off.
    elif state == 4:
        self._powered_on.clear()
        self._powered_off.set()
def start_scan(self, timeout_sec=TIMEOUT_SEC):
    """Start scanning for BLE devices."""
    # Scan for any peripheral: no service-UUID filter, no scan options.
    central = get_provider()._central_manager
    central.scanForPeripheralsWithServices_options_(None, None)
    self._is_scanning = True
def stop_scan(self, timeout_sec=TIMEOUT_SEC):
    """Stop scanning for BLE devices."""
    central = get_provider()._central_manager
    central.stopScan()
    self._is_scanning = False
def power_on(self, timeout_sec=TIMEOUT_SEC):
    """Power on Bluetooth."""
    # Reset the event, request power-on, then wait for the state-change
    # callback to confirm the adapter actually powered up.
    self._powered_on.clear()
    IOBluetoothPreferenceSetControllerPowerState(1)
    powered = self._powered_on.wait(timeout_sec)
    if not powered:
        raise RuntimeError('Exceeded timeout waiting for adapter to power on!')
def power_off(self, timeout_sec=TIMEOUT_SEC):
    """Power off Bluetooth."""
    # Reset the event, request power-off, then wait for the state-change
    # callback to confirm the adapter actually powered down.
    self._powered_off.clear()
    IOBluetoothPreferenceSetControllerPowerState(0)
    powered_down = self._powered_off.wait(timeout_sec)
    if not powered_down:
        raise RuntimeError('Exceeded timeout waiting for adapter to power off!')
def find_device(cls, timeout_sec=TIMEOUT_SEC):
    """Find the first available device that supports this service and return
    it, or None if no device is found.  Will wait for up to timeout_sec
    seconds to find the device.
    """
    provider = get_provider()
    return provider.find_device(service_uuids=cls.ADVERTISED,
                                timeout_sec=timeout_sec)
def discover(cls, device, timeout_sec=TIMEOUT_SEC):
    """Wait until the specified device has discovered the expected services
    and characteristics for this service.  Should be called once before other
    calls are made on the service.  Returns true if the service has been
    discovered in the specified timeout, or false if not discovered.
    """
    # Propagate the device's discovery result; previously the result was
    # dropped, so this method always returned None despite the documented
    # True/False contract.
    return device.discover(cls.SERVICES, cls.CHARACTERISTICS, timeout_sec)
def find_service(self, uuid):
    """Return the first child service found that has the specified UUID.
    Will return None if no service that matches is found.
    """
    # Linear scan is fine; service lists are small.
    matches = (svc for svc in self.list_services() if svc.uuid == uuid)
    return next(matches, None)
def connect(self, timeout_sec=TIMEOUT_SEC):
    """Connect to the device.  If not connected within the specified timeout
    then an exception is thrown.
    """
    # Reset the event, ask bluez to connect, then wait for confirmation.
    self._connected.clear()
    self._device.Connect()
    success = self._connected.wait(timeout_sec)
    if not success:
        raise RuntimeError('Exceeded timeout waiting to connect to device!')
def disconnect(self, timeout_sec=TIMEOUT_SEC):
    """Disconnect from the device.  If not disconnected within the specified
    timeout then an exception is thrown.
    """
    # Reset the event, ask bluez to disconnect, then wait for confirmation.
    self._disconnected.clear()
    self._device.Disconnect()
    success = self._disconnected.wait(timeout_sec)
    if not success:
        raise RuntimeError('Exceeded timeout waiting to disconnect from device!')
def list_services(self):
    """Return a list of GattService objects that have been discovered for
    this device.
    """
    # list() ensures a real list is returned on Python 3 (map is a lazy
    # iterator there, which breaks callers that index or re-iterate).
    return list(map(BluezGattService,
                    get_provider()._get_objects(_SERVICE_INTERFACE,
                                                self._device.object_path)))
def discover(self, service_uuids, char_uuids, timeout_sec=TIMEOUT_SEC): """Wait up to timeout_sec for the specified services and characteristics to be discovered on the device. If the timeout is exceeded without discovering the services and characteristics then an exception is thrown. """ # Turn expected values into a counter of each UUID for fast comparison. expected_services = set(service_uuids) expected_chars = set(char_uuids) # Loop trying to find the expected services for the device. start = time.time() while True: # Find actual services discovered for the device. actual_services = set(self.advertised) # Find actual characteristics discovered for the device. chars = map(BluezGattCharacteristic, get_provider()._get_objects(_CHARACTERISTIC_INTERFACE, self._device.object_path)) actual_chars = set(map(lambda x: x.uuid, chars)) # Compare actual discovered UUIDs with expected and return true if at # least the expected UUIDs are available. if actual_services >= expected_services and actual_chars >= expected_chars: # Found at least the expected services! return True # Couldn't find the devices so check if timeout has expired and try again. if time.time()-start >= timeout_sec: return False time.sleep(1)
def advertised(self):
    """Return a list of UUIDs for services that are advertised by this
    device.
    """
    uuids = []
    # Get UUIDs property but wrap it in a try/except to catch if the property
    # doesn't exist as it is optional.
    try:
        uuids = self._props.Get(_INTERFACE, 'UUIDs')
    except dbus.exceptions.DBusException as ex:
        # Ignore error if device has no UUIDs property (i.e. might not be
        # a BLE device).
        if ex.get_dbus_name() != 'org.freedesktop.DBus.Error.InvalidArgs':
            # Bare raise preserves the original traceback ('raise ex'
            # resets it on Python 2).
            raise
    return [uuid.UUID(str(x)) for x in uuids]
def find_characteristic(self, uuid):
    """Return the first child characteristic found that has the specified
    UUID.  Will return None if no characteristic that matches is found.
    """
    # Linear scan is fine; characteristic lists are small.
    matches = (c for c in self.list_characteristics() if c.uuid == uuid)
    return next(matches, None)
def find_descriptor(self, uuid):
    """Return the first child descriptor found that has the specified UUID.
    Will return None if no descriptor that matches is found.
    """
    # Linear scan is fine; descriptor lists are small.
    matches = (d for d in self.list_descriptors() if d.uuid == uuid)
    return next(matches, None)
def read_value(self, timeout_sec=TIMEOUT_SEC):
    """Read the value of this characteristic."""
    # Start the asynchronous read, then wait for the delegate callback to
    # signal that the value has arrived.
    self._value_read.clear()
    self._device._peripheral.readValueForCharacteristic_(self._characteristic)
    completed = self._value_read.wait(timeout_sec)
    if not completed:
        raise RuntimeError('Exceeded timeout waiting to read characteristic value!')
    return self._characteristic.value()
def write_value(self, value, write_type=0):
    """Write the specified value to this characteristic."""
    # Wrap the raw value in an NSData buffer as required by CoreBluetooth.
    payload = NSData.dataWithBytes_length_(value, len(value))
    self._device._peripheral.writeValue_forCharacteristic_type_(
        payload, self._characteristic, write_type)
def start_notify(self, on_change):
    """Enable notification of changes for this characteristic on the
    specified on_change callback.  on_change should be a function that takes
    one parameter which is the value (as a string of bytes) of the changed
    characteristic value.
    """
    # Register the callback with the owning device, then ask CoreBluetooth
    # to start delivering change notifications for this characteristic.
    self._device._notify_characteristic(self._characteristic, on_change)
    self._device._peripheral.setNotifyValue_forCharacteristic_(
        True, self._characteristic)
def read_value(self, timeout_sec=TIMEOUT_SEC):
    """Read the value of this descriptor.

    Fixes over the previous version: removed a stray 'pass' statement, added
    the missing timeout_sec parameter (it was referenced but never defined,
    causing a NameError), and corrected the error message to say
    'descriptor' instead of 'characteristic'.
    """
    # Kick off a query to read the value of the descriptor, then wait
    # for the result to return asynchronously.
    self._value_read.clear()
    # NOTE(review): the original called readValueForDescriptor (no trailing
    # underscore), which is not a valid PyObjC binding for the one-argument
    # readValueForDescriptor: selector -- confirm against the PyObjC bridge.
    self._device._peripheral.readValueForDescriptor_(self._descriptor)
    if not self._value_read.wait(timeout_sec):
        raise RuntimeError('Exceeded timeout waiting to read descriptor value!')
    return self._value
def start_scan(self, timeout_sec=TIMEOUT_SEC):
    """Start scanning for BLE devices with this adapter."""
    # Request discovery, then wait for bluez to confirm scanning started.
    self._scan_started.clear()
    self._adapter.StartDiscovery()
    started = self._scan_started.wait(timeout_sec)
    if not started:
        raise RuntimeError('Exceeded timeout waiting for adapter to start scanning!')
def stop_scan(self, timeout_sec=TIMEOUT_SEC):
    """Stop scanning for BLE devices with this adapter."""
    # Request discovery stop, then wait for bluez to confirm it.
    self._scan_stopped.clear()
    self._adapter.StopDiscovery()
    stopped = self._scan_stopped.wait(timeout_sec)
    if not stopped:
        raise RuntimeError('Exceeded timeout waiting for adapter to stop scanning!')
def centralManagerDidUpdateState_(self, manager):
    """Called when the BLE adapter is powered on and ready to scan/connect
    to devices.
    """
    logger.debug('centralManagerDidUpdateState called')
    # Forward the new central-manager state to the adapter wrapper.
    adapter = get_provider()._adapter
    adapter._state_changed(manager.state())
def centralManager_didDiscoverPeripheral_advertisementData_RSSI_(self, manager, peripheral, data, rssi):
    """Called when the BLE adapter found a device while scanning, or has new
    advertisement data for a device.
    """
    logger.debug('centralManager_didDiscoverPeripheral_advertisementData_RSSI called')
    # Look up (or lazily create) the metadata wrapper for this peripheral,
    # then feed it the latest advertisement payload.
    device = device_list().get(peripheral)
    if device is None:
        device = device_list().add(peripheral, CoreBluetoothDevice(peripheral))
    device._update_advertised(data)
def centralManager_didConnectPeripheral_(self, manager, peripheral):
    """Called when a device is connected."""
    logger.debug('centralManager_didConnectPeripheral called')
    # Become the peripheral's delegate and immediately kick off discovery of
    # all its services (no UUID filter).
    peripheral.setDelegate_(self)
    peripheral.discoverServices_(None)
    # Fire the connected event on the device wrapper, if known.
    known_device = device_list().get(peripheral)
    if known_device is not None:
        known_device._set_connected()
def centralManager_didDisconnectPeripheral_error_(self, manager, peripheral, error):
    """Called when a device is disconnected."""
    logger.debug('centralManager_didDisconnectPeripheral called')
    known_device = device_list().get(peripheral)
    if known_device is None:
        return
    # Fire the disconnected event, then forget the device entirely.
    known_device._set_disconnected()
    device_list().remove(peripheral)
def peripheral_didDiscoverServices_(self, peripheral, services):
    """Called when services are discovered for a device."""
    logger.debug('peripheral_didDiscoverServices called')
    # NOTE: the services parameter is never populated with a good value;
    # the discovered services must be read from peripheral.services().
    for service in peripheral.services():
        # Register any service we haven't seen before.
        if service_list().get(service) is None:
            service_list().add(service, CoreBluetoothGattService(service))
        # Discover every characteristic of this service (no filter).
        peripheral.discoverCharacteristics_forService_(None, service)
def peripheral_didDiscoverCharacteristicsForService_error_(self, peripheral, service, error):
    """Called when characteristics are discovered for a service."""
    logger.debug('peripheral_didDiscoverCharacteristicsForService_error called')
    # Bail out on any discovery error.
    if error is not None:
        return
    for char in service.characteristics():
        # Register any characteristic we haven't seen before.
        if characteristic_list().get(char) is None:
            characteristic_list().add(char, CoreBluetoothGattCharacteristic(char))
        # Kick off descriptor discovery for this characteristic.
        peripheral.discoverDescriptorsForCharacteristic_(char)
    # Let the owning device know this service's characteristics are in.
    owner = device_list().get(peripheral)
    if owner is not None:
        owner._characteristics_discovered(service)
def peripheral_didDiscoverDescriptorsForCharacteristic_error_(self, peripheral, characteristic, error):
    """Called when descriptors are discovered for a characteristic.

    (Docstring fixed: it was a copy-paste from the characteristic
    discovery callback.)
    """
    logger.debug('peripheral_didDiscoverDescriptorsForCharacteristic_error called')
    # Stop if there was some kind of error.
    if error is not None:
        return
    # Make sure the discovered descriptors are added to the list of known
    # descriptors.
    for desc in characteristic.descriptors():
        # Add to list of known descriptors.
        if descriptor_list().get(desc) is None:
            descriptor_list().add(desc, CoreBluetoothGattDescriptor(desc))
def peripheral_didUpdateValueForCharacteristic_error_(self, peripheral, characteristic, error):
    """Called when characteristic value was read or updated."""
    logger.debug('peripheral_didUpdateValueForCharacteristic_error called')
    # Ignore errored updates.
    if error is not None:
        return
    # Route the new value to the owning device, if known.
    owner = device_list().get(peripheral)
    if owner is not None:
        owner._characteristic_changed(characteristic)
def peripheral_didUpdateValueForDescriptor_error_(self, peripheral, descriptor, error):
    """Called when descriptor value was read or updated."""
    logger.debug('peripheral_didUpdateValueForDescriptor_error called')
    # Ignore errored updates.
    if error is not None:
        return
    # Route the new value to the owning device, if known.
    owner = device_list().get(peripheral)
    if owner is not None:
        owner._descriptor_changed(descriptor)
def peripheral_didReadRSSI_error_(self, peripheral, rssi, error):
    """Called when a new RSSI value for the peripheral is available."""
    logger.debug('peripheral_didReadRSSI_error called')
    # This delegate method appears to be completely undocumented at the time
    # of this writing.  More details at:
    # http://stackoverflow.com/questions/25952218/ios-8-corebluetooth-deprecated-rssi-methods
    # Ignore errored reads.
    if error is not None:
        return
    # Route the new RSSI value to the owning device, if known.
    owner = device_list().get(peripheral)
    if owner is not None:
        owner._rssi_changed(rssi)
def initialize(self):
    """Initialize the BLE provider.  Must be called once before any other
    calls are made to the provider.
    """
    # Setup the central manager and its delegate.  Keep the object returned
    # by init: Objective-C initializers may return a different instance than
    # alloc, so discarding the init result (as the old code did) risks
    # keeping an uninitialized manager around.
    self._central_manager = CBCentralManager.alloc().initWithDelegate_queue_options_(
        self._central_delegate, None, None)
def run_mainloop_with(self, target): """Start the OS's main loop to process asyncronous BLE events and then run the specified target function in a background thread. Target function should be a function that takes no parameters and optionally return an integer response code. When the target function stops executing or returns with value then the main loop will be stopped and the program will exit with the returned code. Note that an OS main loop is required to process asyncronous BLE events and this function is provided as a convenience for writing simple tools and scripts that don't need to be full-blown GUI applications. If you are writing a GUI application that has a main loop (a GTK glib main loop on Linux, or a Cocoa main loop on OSX) then you don't need to call this function. """ # Create background thread to run user code. self._user_thread = threading.Thread(target=self._user_thread_main, args=(target,)) self._user_thread.daemon = True self._user_thread.start() # Run main loop. This call will never return! try: AppHelper.runConsoleEventLoop(installInterrupt=True) except KeyboardInterrupt: AppHelper.stopEventLoop() sys.exit(0)
def _user_thread_main(self, target):
    """Main entry point for the thread that will run user's code."""
    try:
        # Run the user's code and normalize a missing return value to 0.
        return_code = target()
        if return_code is None:
            return_code = 0
        # Schedule process exit on the main thread with the user's code.
        AppHelper.callAfter(lambda: sys.exit(return_code))
    except Exception:
        # Something went wrong; re-raise the exception on the main thread
        # so the process exits with the error.
        AppHelper.callAfter(self._raise_error, sys.exc_info())
def clear_cached_data(self): """Clear the internal bluetooth device cache. This is useful if a device changes its state like name and it can't be detected with the new state anymore. WARNING: This will delete some files underneath the running user's ~/Library/Preferences/ folder! See this Stackoverflow question for information on what the function does: http://stackoverflow.com/questions/20553957/how-can-i-clear-the-corebluetooth-cache-on-macos """ # Turn off bluetooth. if self._adapter.is_powered: self._adapter.power_off() # Delete cache files and suppress any stdout/err output. with open(os.devnull, 'w') as devnull: subprocess.call('rm ~/Library/Preferences/com.apple.Bluetooth.plist', shell=True, stdout=devnull, stderr=subprocess.STDOUT) subprocess.call('rm ~/Library/Preferences/ByHost/com.apple.Bluetooth.*.plist', shell=True, stdout=devnull, stderr=subprocess.STDOUT)
def disconnect_devices(self, service_uuids):
    """Disconnect any connected devices that have any of the specified
    service UUIDs.
    """
    # Materialize the CBUUID list: on Python 3 map() is a lazy iterator and
    # the ObjC bridge expects a concrete sequence.
    cbuuids = list(map(uuid_to_cbuuid, service_uuids))
    connected = self._central_manager.retrieveConnectedPeripheralsWithServices_(cbuuids)
    for device in connected:
        self._central_manager.cancelPeripheralConnection_(device)
def initialize(self): """Initialize bluez DBus communication. Must be called before any other calls are made! """ # Ensure GLib's threading is initialized to support python threads, and # make a default mainloop that all DBus objects will inherit. These # commands MUST execute before any other DBus commands! GObject.threads_init() dbus.mainloop.glib.threads_init() # Set the default main loop, this also MUST happen before other DBus calls. self._mainloop = dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) # Get the main DBus system bus and root bluez object. self._bus = dbus.SystemBus() self._bluez = dbus.Interface(self._bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.ObjectManager')
def run_mainloop_with(self, target): """Start the OS's main loop to process asyncronous BLE events and then run the specified target function in a background thread. Target function should be a function that takes no parameters and optionally return an integer response code. When the target function stops executing or returns with value then the main loop will be stopped and the program will exit with the returned code. Note that an OS main loop is required to process asyncronous BLE events and this function is provided as a convenience for writing simple tools and scripts that don't need to be full-blown GUI applications. If you are writing a GUI application that has a main loop (a GTK glib main loop on Linux, or a Cocoa main loop on OSX) then you don't need to call this function. """ # Spin up a background thread to run the target code. self._user_thread = threading.Thread(target=self._user_thread_main, args=(target,)) self._user_thread.daemon = True # Don't let the user thread block exit. self._user_thread.start() # Spin up a GLib main loop in the main thread to process async BLE events. self._gobject_mainloop = GObject.MainLoop() try: self._gobject_mainloop.run() # Doesn't return until the mainloop ends. except KeyboardInterrupt: self._gobject_mainloop.quit() sys.exit(0) # Main loop finished. Check if an exception occured and throw it, # otherwise return the status code from the user code. if self._exception is not None: # Rethrow exception with its original stack trace following advice from: # http://nedbatchelder.com/blog/200711/rethrowing_exceptions_in_python.html raise_(self._exception[1], None, self._exception[2]) else: sys.exit(self._return_code)
def _user_thread_main(self, target):
    """Main entry point for the thread that will run user's code."""
    try:
        # Wait for the GLib main loop to start running before starting user
        # code, since DBus calls need the loop to be serviced.
        while True:
            if self._gobject_mainloop is not None and self._gobject_mainloop.is_running():
                # Main loop is running, we should be ready to make bluez DBus calls.
                break
            # Main loop isn't running yet, give time back to other threads.
            time.sleep(0)
        # Run user's code.
        self._return_code = target()
        # Assume good result (0 return code) if none is returned.
        if self._return_code is None:
            self._return_code = 0
        # Signal the main loop to exit so run_mainloop_with can finish.
        self._gobject_mainloop.quit()
    except Exception as ex:
        # Something went wrong.  Record the exception so the main thread can
        # re-raise it, then stop the main loop.
        self._exception = sys.exc_info()
        self._gobject_mainloop.quit()
def clear_cached_data(self):
    """Clear any internally cached BLE device data.  Necessary in some cases
    to prevent issues with stale device data getting cached by the OS.
    """
    # Ask bluez to forget every device that isn't currently connected.
    for device in self.list_devices():
        if device.is_connected:
            continue
        # RemoveDevice lives on the adapter that owns this device, so grab
        # that adapter's DBus interface first.
        owning_adapter = dbus.Interface(
            self._bus.get_object('org.bluez', device._adapter),
            _ADAPTER_INTERFACE)
        owning_adapter.RemoveDevice(device._device.object_path)
def disconnect_devices(self, service_uuids=None):
    """Disconnect any connected devices that have the specified list of
    service UUIDs.  The default (None) means all devices are disconnected.
    """
    # None default instead of a mutable [] (which is shared across calls).
    expected = set(service_uuids) if service_uuids is not None else set()
    for device in self.list_devices():
        # Skip devices that aren't connected.
        if not device.is_connected:
            continue
        device_uuids = set(map(lambda x: x.uuid, device.list_services()))
        if device_uuids >= expected:
            # Found a device that has at least the requested services, now
            # disconnect from it.
            device.disconnect()
def _get_objects(self, interface, parent_path='/org/bluez'):
    """Return a list of all bluez DBus objects that implement the requested
    interface name and are under the specified path.  The default is to
    search devices under the root of all bluez objects.
    """
    # Walk bluez's entire managed-object tree, keeping objects that expose
    # the requested interface and live below parent_path (case-insensitive).
    prefix = parent_path.lower()
    found = []
    for opath, interfaces in iteritems(self._bluez.GetManagedObjects()):
        if interface in interfaces and opath.lower().startswith(prefix):
            found.append(self._bus.get_object('org.bluez', opath))
    return found
def _get_objects_by_path(self, paths): """Return a list of all bluez DBus objects from the provided list of paths. """ return map(lambda x: self._bus.get_object('org.bluez', x), paths)
def _print_tree(self): """Print tree of all bluez objects, useful for debugging.""" # This is based on the bluez sample code get-managed-objects.py. objects = self._bluez.GetManagedObjects() for path in objects.keys(): print("[ %s ]" % (path)) interfaces = objects[path] for interface in interfaces.keys(): if interface in ["org.freedesktop.DBus.Introspectable", "org.freedesktop.DBus.Properties"]: continue print(" %s" % (interface)) properties = interfaces[interface] for key in properties.keys(): print(" %s = %s" % (key, properties[key]))
def find_devices(self, service_uuids=None, name=None):
    """Return devices that advertise the specified service UUIDs and/or have
    the specified name.  Service_uuids should be a list of Python uuid.UUID
    objects and is optional.  Name is a string device name to look for and is
    also optional.  Will not block, instead it returns immediately with a
    list of found devices (which might be empty).
    """
    # None default instead of a mutable [] (which is shared across calls);
    # None behaves exactly like the old empty list (match everything).
    expected = set(service_uuids) if service_uuids is not None else set()
    # Grab all the devices.
    devices = self.list_devices()
    # Filter to just the devices that have the requested service UUID/name.
    found = []
    for device in devices:
        if name is not None:
            # Check if the name matches and add the device.
            if device.name == name:
                found.append(device)
        else:
            # Check if the advertised UUIDs have at least the expected UUIDs.
            actual = set(device.advertised)
            if actual >= expected:
                found.append(device)
    return found
def find_device(self, service_uuids=None, name=None, timeout_sec=TIMEOUT_SEC):
    """Return the first device that advertises the specified service UUIDs or
    has the specified name.  Will wait up to timeout_sec seconds for the
    device to be found, and if the timeout is zero then it will not wait at
    all and immediately return a result.  When no device is found a value of
    None is returned.
    """
    # None default instead of a mutable [] (which is shared across calls).
    # Normalize back to a list so find_devices sees the same value as before.
    if service_uuids is None:
        service_uuids = []
    start = time.time()
    while True:
        # Call find_devices and grab the first result if any are found.
        found = self.find_devices(service_uuids, name)
        if len(found) > 0:
            return found[0]
        # No device was found.  Check if the timeout is exceeded and wait to
        # try again.
        if time.time() - start >= timeout_sec:
            # Failed to find a device within the timeout.
            return None
        time.sleep(1)
def get_all(self, cbobjects):
    """Retrieve a list of metadata objects associated with the specified
    list of CoreBluetooth objects.  If an object cannot be found then an
    exception is thrown.
    """
    with self._lock:
        try:
            return [self._metadata[obj] for obj in cbobjects]
        except KeyError:
            # Note that if this error gets thrown then the assumption that
            # OSX will pass back to callbacks the exact CoreBluetooth objects
            # that were used previously is broken!  (i.e. the CoreBluetooth
            # objects are not stateless)
            raise RuntimeError('Failed to find expected metadata for CoreBluetooth object!')
def add(self, cbobject, metadata):
    """Add the specified CoreBluetooth item with the associated metadata if
    it doesn't already exist.  Returns the newly created or preexisting
    metadata item.
    """
    with self._lock:
        # setdefault stores metadata only when the key is new and always
        # hands back the stored item, matching the old check-then-set logic.
        return self._metadata.setdefault(cbobject, metadata)
def remove(self, cbobject):
    """Remove any metadata associated with the provided CoreBluetooth
    object.
    """
    with self._lock:
        # pop with a default quietly ignores objects that were never added.
        self._metadata.pop(cbobject, None)
def cbuuid_to_uuid(cbuuid):
    """Convert Objective-C CBUUID type to native Python UUID type."""
    raw = cbuuid.data().bytes()
    if len(raw) <= 4:
        # Short (16/32-bit) UUIDs expand into the Bluetooth base UUID.
        template = '{:0>8}-0000-1000-8000-00805f9b34fb'
    else:
        template = '{:0>32}'
    hex_digits = hexlify(raw.tobytes()[:16]).decode('ascii')
    return uuid.UUID(hex=template.format(hex_digits))
def set_color(self, r, g, b):
    """Set the bulb color to the given red, green, blue components
    (each masked to 0-255).

    Protocol reference:
    https://learn.adafruit.com/reverse-engineering-a-bluetooth-low-energy-light-bulb/overview
    """
    # Fixed command prefix followed by the three color bytes.
    payload = ''.join([
        '\x58\x01\x03\x01\xFF\x00',
        chr(r & 0xFF),
        chr(g & 0xFF),
        chr(b & 0xFF),
    ])
    self._color.write_value(payload)
def get_provider():
    """Return the singleton BLE provider for the current platform,
    creating it on first use.

    Raises RuntimeError on platforms with no provider implementation.
    """
    global _provider
    if _provider is None:
        # Lazily import and instantiate the platform-specific provider so
        # unsupported backends are never imported.
        if sys.platform.startswith('linux'):
            from .bluez_dbus.provider import BluezProvider
            _provider = BluezProvider()
        elif sys.platform == 'darwin':
            from .corebluetooth.provider import CoreBluetoothProvider
            _provider = CoreBluetoothProvider()
        else:
            raise RuntimeError('Sorry the {0} platform is not supported by the BLE library!'.format(sys.platform))
    return _provider
def toBigInt(byteArray):
    """Interpret byteArray as a little-endian unsigned integer."""
    # The input stores the least-significant byte first, so walk it from
    # the high end and shift previous work up one byte at a time.
    result = 0
    for b in reversed(byteArray):
        result = (result << 8) | b
    return result
def encryptPassword(self, login, passwd): """Encrypt credentials using the google publickey, with the RSA algorithm""" # structure of the binary key: # # *-------------------------------------------------------* # | modulus_length | modulus | exponent_length | exponent | # *-------------------------------------------------------* # # modulus_length and exponent_length are uint32 binaryKey = b64decode(config.GOOGLE_PUBKEY) # modulus i = utils.readInt(binaryKey, 0) modulus = utils.toBigInt(binaryKey[4:][0:i]) # exponent j = utils.readInt(binaryKey, i + 4) exponent = utils.toBigInt(binaryKey[i + 8:][0:j]) # calculate SHA1 of the pub key digest = hashes.Hash(hashes.SHA1(), backend=default_backend()) digest.update(binaryKey) h = b'\x00' + digest.finalize()[0:4] # generate a public key der_data = encode_dss_signature(modulus, exponent) publicKey = load_der_public_key(der_data, backend=default_backend()) # encrypt email and password using pubkey to_be_encrypted = login.encode() + b'\x00' + passwd.encode() ciphertext = publicKey.encrypt( to_be_encrypted, padding.OAEP( mgf=padding.MGF1(algorithm=hashes.SHA1()), algorithm=hashes.SHA1(), label=None ) ) return urlsafe_b64encode(h + ciphertext)
def getHeaders(self, upload_fields=False):
    """Build the default request headers for a Play Store request.

    upload_fields selects the device-upload header set instead of the
    base set.  Identity/session headers are only attached when the
    corresponding token is currently known.
    """
    if upload_fields:
        headers = self.deviceBuilder.getDeviceUploadHeaders()
    else:
        headers = self.deviceBuilder.getBaseHeaders()
    # Each optional header maps to None when its backing token is unset.
    optional = (
        ("X-DFE-Device-Id",
         None if self.gsfId is None else "{0:x}".format(self.gsfId)),
        ("Authorization",
         None if self.authSubToken is None else "GoogleLogin auth=%s" % self.authSubToken),
        ("X-DFE-Device-Config-Token", self.device_config_token),
        ("X-DFE-Device-Checkin-Consistency-Token", self.deviceCheckinConsistencyToken),
        ("X-DFE-Cookie", self.dfeCookie),
    )
    for key, value in optional:
        if value is not None:
            headers[key] = value
    return headers
def uploadDeviceConfig(self):
    """Upload the device configuration of the fake device selected in the
    __init__ method to the google account, storing the returned
    device-config token on this instance when the server provides one."""
    upload = googleplay_pb2.UploadDeviceConfigRequest()
    upload.deviceConfiguration.CopyFrom(self.deviceBuilder.getDeviceConfig())
    headers = self.getHeaders(upload_fields=True)
    stringRequest = upload.SerializeToString()
    response = requests.post(UPLOAD_URL, data=stringRequest,
                             headers=headers,
                             verify=ssl_verify,
                             timeout=60,
                             proxies=self.proxies_config)
    response = googleplay_pb2.ResponseWrapper.FromString(response.content)
    try:
        if response.payload.HasField('uploadDeviceConfigResponse'):
            # Kept for later requests as X-DFE-Device-Config-Token (see
            # getHeaders); two-step assignment preserved as-is.
            self.device_config_token = response.payload.uploadDeviceConfigResponse
            self.device_config_token = self.device_config_token.uploadDeviceConfigToken
    except ValueError:
        # NOTE(review): ValueError is swallowed -- presumably best-effort
        # behavior when the field is absent/renamed; confirm upstream intent.
        pass
def login(self, email=None, password=None, gsfId=None, authSubToken=None):
    """Login to your Google Account.

    For first time login you should provide:
        * email
        * password
    For the following logins you need to provide:
        * gsfId
        * authSubToken

    Raises LoginError for bad/missing credentials and SecurityCheckError
    when Google requires a browser-based unlock step.
    """
    if email is not None and password is not None:
        # First time setup, where we obtain an ac2dm token and
        # upload device information
        encryptedPass = self.encryptPassword(email, password).decode('utf-8')
        # AC2DM token
        params = self.deviceBuilder.getLoginParams(email, encryptedPass)
        params['service'] = 'ac2dm'
        params['add_account'] = '1'
        params['callerPkg'] = 'com.google.android.gms'
        headers = self.deviceBuilder.getAuthHeaders(self.gsfId)
        # NOTE(review): 'gsm' here vs 'gms' in callerPkg above -- looks like
        # a typo but it is part of the wire protocol; kept as-is, confirm
        # against upstream before changing.
        headers['app'] = 'com.google.android.gsm'
        response = requests.post(AUTH_URL, data=params, verify=ssl_verify,
                                 proxies=self.proxies_config)
        # The auth endpoint replies with whitespace-separated "key=value"
        # tokens; fold them into a lowercase-keyed dict.
        data = response.text.split()
        params = {}
        for d in data:
            if "=" not in d:
                continue
            k, v = d.split("=", 1)
            params[k.strip().lower()] = v.strip()
        if "auth" in params:
            ac2dmToken = params["auth"]
        elif "error" in params:
            if "NeedsBrowser" in params["error"]:
                raise SecurityCheckError("Security check is needed, try to visit "
                                         "https://accounts.google.com/b/0/DisplayUnlockCaptcha "
                                         "to unlock, or setup an app-specific password")
            raise LoginError("server says: " + params["error"])
        else:
            raise LoginError("Auth token not found.")
        # Complete first-time setup: device checkin, auth token, config upload.
        self.gsfId = self.checkin(email, ac2dmToken)
        self.getAuthSubToken(email, encryptedPass)
        self.uploadDeviceConfig()
    elif gsfId is not None and authSubToken is not None:
        # no need to initialize API
        self.gsfId = gsfId
        self.setAuthSubToken(authSubToken)
        # check if token is valid with a simple search
        self.search('drv')
    else:
        raise LoginError('Either (email,pass) or (gsfId, authSubToken) is needed')
def search(self, query):
    """Search the play store for apps matching query.

    Returns a list of parsed result documents.  Raises LoginError when
    called before logging in.
    """
    if self.authSubToken is None:
        raise LoginError("You need to login before executing any request")
    path = SEARCH_URL + "?c=3&q={}".format(requests.utils.quote(query))
    # FIXME: not sure if this toc call should be here
    self.toc()
    data = self.executeRequestApi2(path)
    # Results may arrive wrapped in a preFetch envelope.
    response = data.preFetch[0].response if utils.hasPrefetch(data) else data
    return [utils.parseProtobufObj(doc)
            for doc in response.payload.listResponse.doc]
def details(self, packageName):
    """Fetch and parse the detail page for a single app.

    packageName is the app unique ID (usually starting with 'com.').
    """
    url = DETAILS_URL + "?doc={}".format(requests.utils.quote(packageName))
    response = self.executeRequestApi2(url)
    return utils.parseProtobufObj(response.payload.detailsResponse.docV2)
def bulkDetails(self, packageNames):
    """Fetch details for several apps with a single request.

    Much more efficient than calling details() N times.  Unlike details(),
    a missing item produces None in the result list instead of raising
    RequestError('Item not found').

    Args:
        packageNames (list): app IDs (usually starting with 'com.').

    Returns:
        list of parsed doc dicts, with None for apps that don't exist.
    """
    request = googleplay_pb2.BulkDetailsRequest()
    request.docid.extend(packageNames)
    serialized = request.SerializeToString()
    # NOTE(review): decoding serialized protobuf as UTF-8 looks fragile for
    # arbitrary bytes -- kept as-is; confirm executeRequestApi2 expectations.
    message = self.executeRequestApi2(BULK_URL,
                                      post_data=serialized.decode("utf-8"),
                                      content_type=CONTENT_TYPE_PROTO,
                                      params={'au': '1'})
    entries = message.payload.bulkDetailsResponse.entry
    return [utils.parseProtobufObj(entry.doc) if utils.hasDoc(entry) else None
            for entry in entries]
def browse(self, cat=None, subCat=None):
    """Browse Play Store categories.

    With no arguments, returns the category list; with cat (category ID)
    and/or subCat (subcategory ID), returns a filtered app listing.
    """
    path = BROWSE_URL + "?c=3"
    if cat is not None:
        path += "&cat={}".format(requests.utils.quote(cat))
    if subCat is not None:
        path += "&ctr={}".format(requests.utils.quote(subCat))
    response = self.executeRequestApi2(path)
    return utils.parseProtobufObj(response.payload.browseResponse)
def list(self, cat, ctr=None, nb_results=None, offset=None):
    """List all possible subcategories for a specific category.  If a
    subcategory is also provided, list apps from that subcategory instead.

    Args:
        cat (str): category id
        ctr (str): subcategory id
        nb_results (int): with ctr, limit the number of results
        offset (int): with ctr, start counting from this result

    Returns:
        A list of subcategory docids, or (with ctr) a list of parsed apps.
    """
    path = LIST_URL + "?c=3&cat={}".format(requests.utils.quote(cat))
    if ctr is not None:
        path += "&ctr={}".format(requests.utils.quote(ctr))
    if nb_results is not None:
        path += "&n={}".format(requests.utils.quote(str(nb_results)))
    if offset is not None:
        path += "&o={}".format(requests.utils.quote(str(offset)))
    data = self.executeRequestApi2(path)
    # (cleanup: removed the unused `docs = []` local from the original)
    if ctr is None:
        # List subcategories: flatten the children of every cluster doc.
        clusters = []
        for pf in data.preFetch:
            for cluster in pf.response.payload.listResponse.doc:
                clusters.extend(cluster.child)
        return [c.docid for c in clusters]
    else:
        apps = []
        for d in data.payload.listResponse.doc:  # categories
            for c in d.child:                    # sub-category
                for a in c.child:                # app
                    apps.append(utils.parseProtobufObj(a))
        return apps
def reviews(self, packageName, filterByDevice=False, sort=2,
            nb_results=None, offset=None):
    """Browse reviews for an application.

    Args:
        packageName (str): app unique ID.
        filterByDevice (bool): restrict results to the current device.
        sort (int): sorting criteria (values are unknown).
        nb_results (int): max number of reviews to return.
        offset (int): return reviews starting from this offset.

    Returns:
        list of parsed review objects.
    """
    # TODO: select the number of reviews to return
    path = REVIEWS_URL + "?doc={}&sort={}".format(
        requests.utils.quote(packageName), sort)
    if nb_results is not None:
        path += "&n={}".format(nb_results)
    if offset is not None:
        path += "&o={}".format(offset)
    if filterByDevice:
        path += "&dfil=1"
    data = self.executeRequestApi2(path)
    return [utils.parseProtobufObj(review)
            for review in data.payload.reviewResponse.getResponse.review]
def delivery(self, packageName, versionCode=None, offerType=1, downloadToken=None, expansion_files=False):
    """Download an already purchased app.

    Args:
        packageName (str): app unique ID (usually starting with 'com.')
        versionCode (int): version to download (defaults to the latest)
        offerType (int): different type of downloads (mostly unused for apks)
        downloadToken (str): download token returned by 'purchase' API
        expansion_files (bool): also fetch OBB expansion files

    Returns:
        Dictionary containing apk data and a list of expansion files. As
        stated in android documentation, there can be at most 2 expansion
        files, one with main content, and one for patching the main content.
        Their names should follow this format:

        [main|patch].<expansion-version>.<package-name>.obb

        Data to build this name string is provided in the dict object. For
        more info check
        https://developer.android.com/google/play/expansion-files.html

    Raises:
        RequestError when the server reports an error or the app was never
        purchased.
    """
    if versionCode is None:
        # pick up latest version
        # NOTE(review): download() reads details().get('details')
        # .get('appDetails').get('versionCode') while this reads the top
        # level -- confirm which shape parseProtobufObj actually returns.
        versionCode = self.details(packageName).get('versionCode')
    params = {'ot': str(offerType),
              'doc': packageName,
              'vc': str(versionCode)}
    headers = self.getHeaders()
    if downloadToken is not None:
        params['dtok'] = downloadToken
    response = requests.get(DELIVERY_URL, headers=headers,
                            params=params, verify=ssl_verify,
                            timeout=60,
                            proxies=self.proxies_config)
    response = googleplay_pb2.ResponseWrapper.FromString(response.content)
    if response.commands.displayErrorMessage != "":
        raise RequestError(response.commands.displayErrorMessage)
    elif response.payload.deliveryResponse.appDeliveryData.downloadUrl == "":
        raise RequestError('App not purchased')
    else:
        result = {}
        result['docId'] = packageName
        result['additionalData'] = []
        downloadUrl = response.payload.deliveryResponse.appDeliveryData.downloadUrl
        # The download URL only works together with this auth cookie.
        cookie = response.payload.deliveryResponse.appDeliveryData.downloadAuthCookie[0]
        cookies = {
            str(cookie.name): str(cookie.value)
        }
        result['file'] = self._deliver_data(downloadUrl, cookies)
        if not expansion_files:
            return result
        for obb in response.payload.deliveryResponse.appDeliveryData.additionalFile:
            a = {}
            # fileType == 0 -> main
            # fileType == 1 -> patch
            if obb.fileType == 0:
                obbType = 'main'
            else:
                obbType = 'patch'
            a['type'] = obbType
            a['versionCode'] = obb.versionCode
            a['file'] = self._deliver_data(obb.downloadUrl, None)
            result['additionalData'].append(a)
        return result
def download(self, packageName, versionCode=None, offerType=1, expansion_files=False):
    """Purchase (for free apps) and then download an app's raw APK data.

    The "purchase" step retrieves the download token that delivery()
    needs; for apps already purchased, call delivery() directly.

    Args:
        packageName (str): app unique ID (usually starting with 'com.')
        versionCode (int): version to download (defaults to the latest)
        offerType (int): different type of downloads (mostly unused for apks)
        expansion_files (bool): also fetch OBB expansion files

    Returns:
        Dictionary containing apk data and optional expansion files
        (see delivery()).
    """
    if self.authSubToken is None:
        raise LoginError("You need to login before executing any request")
    if versionCode is None:
        # Resolve the most recent version for this package.
        appDetails = self.details(packageName).get('details').get('appDetails')
        versionCode = appDetails.get('versionCode')
    headers = self.getHeaders()
    params = {'ot': str(offerType),
              'doc': packageName,
              'vc': str(versionCode)}
    self.log(packageName)
    raw = requests.post(PURCHASE_URL, headers=headers,
                        params=params, verify=ssl_verify,
                        timeout=60,
                        proxies=self.proxies_config)
    wrapper = googleplay_pb2.ResponseWrapper.FromString(raw.content)
    if wrapper.commands.displayErrorMessage != "":
        raise RequestError(wrapper.commands.displayErrorMessage)
    dlToken = wrapper.payload.buyResponse.downloadToken
    return self.delivery(packageName, versionCode, offerType, dlToken,
                         expansion_files=expansion_files)
def http_connection(timeout):
    """Decorator that guarantees the wrapped function receives an HTTP
    session via its `connection` keyword argument.

    When the caller does not supply one, a fresh requests.Session is
    injected.  The connection is given the default `timeout` attribute
    (unless it already has a truthy one) and a JSON Content-type header.
    """
    def wrapper(f):
        def wrapped(*args, **kwargs):
            connection = kwargs.get('connection')
            if not connection:
                connection = requests.Session()
                kwargs['connection'] = connection
            if not getattr(connection, 'timeout', False):
                connection.timeout = timeout
            connection.headers.update({'Content-type': 'application/json'})
            return f(*args, **kwargs)
        return wraps(f)(wrapped)
    return wrapper
def create_token(self, data, options=None):
    """Generate a signed Firebase Authentication token (JWT format:
    header.claims.signature, each part URL-safe base64-encoded JSON).

    Library-generated claims are 'v' (token version), 'iat' (issued-at,
    seconds since the epoch), 'd' (the caller-supplied payload), plus the
    instance's 'admin' and 'debug' flags.

    Args:
        data: JSON-serializable object stored under the 'd' claim.
        options: optional dict of extra claims.  Supported keys:
            'expires' -- timestamp after which the token is invalid.
            'notBefore' -- timestamp before which the token is rejected.
            'admin' -- True to bypass all security rules (trusted servers).
            'debug' -- True to receive security-rule debug information.
            'simulate' -- (internal-only for now) neuter all API operations.

    Returns:
        A signed Firebase Authentication Token string.

    Raises:
        ValueError: if an invalid key is specified in options.
    """
    if not options:
        options = {}
    # Instance-level flags always win over caller-supplied ones.  Note the
    # caller's dict is intentionally updated in place (historic behavior).
    options['admin'] = self.admin
    options['debug'] = self.debug
    claims = self._create_options_claims(options)
    claims['v'] = self.TOKEN_VERSION
    claims['iat'] = int(time.mktime(time.gmtime()))
    claims['d'] = data
    return self._encode_token(self.secret, claims)
def make_get_request(url, params, headers, connection):
    """Issue an HTTP GET against a firebase endpoint.

    `url`: full endpoint URL (DSN appended).
    `params`: dict appended to the URL as a querystring.
    `headers`: dict of HTTP request headers.
    `connection`: predefined HTTP connection instance (normally supplied
    by the `decorators.http_connection` decorator).

    Returns the JSON-decoded body for 2xx and 403 responses (None when the
    body is empty); any other status raises requests.HTTPError.
    """
    response = connection.get(url, params=params, headers=headers,
                              timeout=getattr(connection, 'timeout'))
    if not (response.ok or response.status_code == 403):
        response.raise_for_status()
    return response.json() if response.content else None
def make_put_request(url, data, params, headers, connection):
    """Issue an HTTP PUT against a firebase endpoint.

    `url`: full endpoint URL (DSN appended).
    `data`: JSON-serializable payload stored at the remote location.
    `params`: dict appended to the URL as a querystring.
    `headers`: dict of HTTP request headers.
    `connection`: predefined HTTP connection instance (normally supplied
    by the `decorators.http_connection` decorator).

    Returns the JSON-decoded body for 2xx and 403 responses (None when the
    body is empty); any other status raises requests.HTTPError.
    """
    response = connection.put(url, data=data, params=params, headers=headers,
                              timeout=getattr(connection, 'timeout'))
    if not (response.ok or response.status_code == 403):
        response.raise_for_status()
    return response.json() if response.content else None
def make_post_request(url, data, params, headers, connection):
    """Issue an HTTP POST against a firebase endpoint.

    `url`: full endpoint URL (DSN appended).
    `data`: JSON-serializable payload stored at the remote location.
    `params`: dict appended to the URL as a querystring.
    `headers`: dict of HTTP request headers.
    `connection`: predefined HTTP connection instance (normally supplied
    by the `decorators.http_connection` decorator).

    Returns the JSON-decoded body for 2xx and 403 responses (None when the
    body is empty); any other status raises requests.HTTPError.
    """
    response = connection.post(url, data=data, params=params, headers=headers,
                               timeout=getattr(connection, 'timeout'))
    if not (response.ok or response.status_code == 403):
        response.raise_for_status()
    return response.json() if response.content else None
def make_patch_request(url, data, params, headers, connection):
    """Issue an HTTP PATCH against a firebase endpoint.

    `url`: full endpoint URL (DSN appended).
    `data`: JSON-serializable payload merged into the remote location.
    `params`: dict appended to the URL as a querystring.
    `headers`: dict of HTTP request headers.
    `connection`: predefined HTTP connection instance (normally supplied
    by the `decorators.http_connection` decorator).

    Returns the JSON-decoded body for 2xx and 403 responses (None when the
    body is empty); any other status raises requests.HTTPError.
    """
    response = connection.patch(url, data=data, params=params, headers=headers,
                                timeout=getattr(connection, 'timeout'))
    if not (response.ok or response.status_code == 403):
        response.raise_for_status()
    return response.json() if response.content else None
def make_delete_request(url, params, headers, connection):
    """Issue an HTTP DELETE against a firebase endpoint.

    `url`: full endpoint URL (DSN appended).
    `params`: dict appended to the URL as a querystring.
    `headers`: dict of HTTP request headers.
    `connection`: predefined HTTP connection instance (normally supplied
    by the `decorators.http_connection` decorator).

    Returns the JSON-decoded body for 2xx and 403 responses (typically
    None -- a successful delete has an empty body); any other status
    raises requests.HTTPError.
    """
    response = connection.delete(url, params=params, headers=headers,
                                 timeout=getattr(connection, 'timeout'))
    if not (response.ok or response.status_code == 403):
        response.raise_for_status()
    return response.json() if response.content else None
def get_user(self):
    """Return the authenticated FirebaseUser carrying a freshly generated
    token together with the email and provider data."""
    token = self.authenticator.create_token(self.extra)
    return FirebaseUser(self.email, token, self.provider, self.extra.get('id'))
def _build_endpoint_url(self, url, name=None): """ Method that constructs a full url with the given url and the snapshot name. Example: full_url = _build_endpoint_url('/users', '1') full_url => 'http://firebase.localhost/users/1.json' """ if not url.endswith(self.URL_SEPERATOR): url = url + self.URL_SEPERATOR if name is None: name = '' return '%s%s%s' % (urlparse.urljoin(self.dsn, url), name, self.NAME_EXTENSION)
def _authenticate(self, params, headers): """ Method that simply adjusts authentication credentials for the request. `params` is the querystring of the request. `headers` is the header of the request. If auth instance is not provided to this class, this method simply returns without doing anything. """ if self.authentication: user = self.authentication.get_user() params.update({'auth': user.firebase_auth_token}) headers.update(self.authentication.authenticator.HEADERS)
def get(self, url, name, params=None, headers=None, connection=None):
    """Synchronous GET of the snapshot at url/name; returns the decoded
    JSON response."""
    if name is None:
        name = ''
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    return make_get_request(target, params, headers, connection=connection)
def get_async(self, url, name, callback=None, params=None, headers=None):
    """Asynchronous GET executed on the process pool; `callback` receives
    the decoded response."""
    if name is None:
        name = ''
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    process_pool.apply_async(make_get_request,
                             args=(target, params, headers),
                             callback=callback)
def put(self, url, name, data, params=None, headers=None, connection=None):
    """Synchronous PUT storing ``data`` (a JSONable value) at url/name;
    returns the server's decoded response."""
    assert name, 'Snapshot name must be specified'
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    payload = json.dumps(data, cls=JSONEncoder)
    return make_put_request(target, payload, params, headers,
                            connection=connection)
def put_async(self, url, name, data, callback=None, params=None, headers=None):
    """Asynchronous PUT executed on the process pool; ``data`` must be a
    JSONable value and `callback` receives the decoded response."""
    if name is None:
        name = ''
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    payload = json.dumps(data, cls=JSONEncoder)
    process_pool.apply_async(make_put_request,
                             args=(target, payload, params, headers),
                             callback=callback)
def post(self, url, data, params=None, headers=None, connection=None):
    """Synchronous POST appending ``data`` (a JSONable value) under url;
    returns the server's decoded response."""
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, None)
    self._authenticate(params, headers)
    payload = json.dumps(data, cls=JSONEncoder)
    return make_post_request(target, payload, params, headers,
                             connection=connection)
def post_async(self, url, data, callback=None, params=None, headers=None):
    """Asynchronous POST executed on the process pool; ``data`` must be a
    JSONable value and `callback` receives the decoded response."""
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, None)
    self._authenticate(params, headers)
    payload = json.dumps(data, cls=JSONEncoder)
    process_pool.apply_async(make_post_request,
                             args=(target, payload, params, headers),
                             callback=callback)
def patch(self, url, data, params=None, headers=None, connection=None):
    """Synchronous PATCH merging ``data`` (a JSONable value) into url;
    returns the server's decoded response."""
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, None)
    self._authenticate(params, headers)
    payload = json.dumps(data, cls=JSONEncoder)
    return make_patch_request(target, payload, params, headers,
                              connection=connection)
def delete(self, url, name, params=None, headers=None, connection=None):
    """Synchronous DELETE of the snapshot at url/name; returns the
    server's decoded response (typically None)."""
    name = name or ''
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    return make_delete_request(target, params, headers, connection=connection)
def delete_async(self, url, name, callback=None, params=None, headers=None):
    """Asynchronous DELETE executed on the process pool; `callback`
    receives the decoded response."""
    name = name or ''
    params = params or {}
    headers = headers or {}
    target = self._build_endpoint_url(url, name)
    self._authenticate(params, headers)
    process_pool.apply_async(make_delete_request,
                             args=(target, params, headers),
                             callback=callback)
def do_filter(qs, keywords, exclude=False):
    """Filter (or, with exclude=True, exclude) qs by keyword/value pairs.

    String values may contain comma-separated alternatives (support for
    multiple-selected parent values), which are OR-ed together; distinct
    keywords are AND-ed.
    """
    combined = Q()
    for keyword, value in iteritems(keywords):
        try:
            alternatives = value.split(",")
        except AttributeError:
            # Non-string values (e.g. bool) have no split(); match directly.
            combined &= Q(**{keyword: value})
        else:
            any_of = Q()
            for alternative in alternatives:
                any_of |= Q(**{keyword: alternative})
            combined &= any_of
    return qs.exclude(combined) if exclude else qs.filter(combined)
def filterchain_all(request, app, model, field, foreign_key_app_name, foreign_key_model_name, foreign_key_field_name, value):
    """Returns filtered results followed by excluded results below.

    The JSON response is a flat list of {'value', 'display'} choices:
    matching objects first, then an empty separator choice, then the
    non-matching objects.
    """
    model_class = get_model(app, model)
    keywords = get_keywords(field, value)
    # SECURITY: Make sure all smart selects requests are opt-in
    foreign_model_class = get_model(foreign_key_app_name, foreign_key_model_name)
    if not any([(isinstance(f, ChainedManyToManyField) or
                 isinstance(f, ChainedForeignKey))
                for f in foreign_model_class._meta.get_fields()]):
        raise PermissionDenied("Smart select disallowed")
    # filter queryset using limit_choices_to
    limit_choices_to = get_limit_choices_to(foreign_key_app_name, foreign_key_model_name, foreign_key_field_name)
    queryset = get_queryset(model_class, limit_choices_to=limit_choices_to)
    filtered = list(do_filter(queryset, keywords))
    # Sort results if the model doesn't declare a default ordering.
    # BUG FIX: sort_results() was previously handed a throwaway copy
    # (list(filtered)) whose result was discarded, so the sort never
    # affected the response; pass the actual list instead.
    if not getattr(model_class._meta, 'ordering', False):
        sort_results(filtered)
    excluded = list(do_filter(queryset, keywords, exclude=True))
    if not getattr(model_class._meta, 'ordering', False):
        sort_results(excluded)
    # Empty choice separating filtered from excluded results.
    empty_choice = {'value': "", 'display': "---------"}
    serialized_results = (
        serialize_results(filtered) +
        [empty_choice] +
        serialize_results(excluded)
    )
    return JsonResponse(serialized_results, safe=False)