desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Set operation mode (auto, cool, heat, off).'
def set_operation_mode(self, operation_mode):
if (operation_mode == STATE_OFF): self.device.tmode = 0 elif (operation_mode == STATE_AUTO): self.device.tmode = 3 elif (operation_mode == STATE_COOL): self.device.t_cool = (round((self._target_temperature * 2.0)) / 2.0) elif (operation_mode == STATE_HEAT): self.device.t_...
'Turn away on. The RTCOA app simulates away mode by using a hold.'
def turn_away_mode_on(self):
away_temp = None if (not self._away): self._prev_temp = self._target_temperature if (self._current_operation == STATE_HEAT): away_temp = self._away_temps[0] elif (self._current_operation == STATE_COOL): away_temp = self._away_temps[1] self._away = True sel...
def turn_away_mode_off(self):
    """Turn away mode off and restore the previously saved target temperature."""
    self._away = False
    self.set_temperature(temperature=self._prev_temp)
def __init__(self, client_id, client_secret):
    """Initialize HassLaMetricManager and connect to LaMetric."""
    from lmnotify import LaMetricManager

    _LOGGER.debug("Connecting to LaMetric")
    # Keep the credentials so reconnect() can rebuild the session later.
    self._client_id = client_id
    self._client_secret = client_secret
    self.lmn = LaMetricManager(client_id, client_secret)
def reconnect(self):
    """Reconnect to LaMetric.

    Usually necessary after the OAuth token has expired.
    """
    from lmnotify import LaMetricManager

    _LOGGER.debug("Reconnecting to LaMetric")
    self.lmn = LaMetricManager(self._client_id, self._client_secret)
def manager(self):
    """Return the global LaMetricManager instance."""
    return self.lmn
def __init__(self, url, requires_auth, title, items):
    """Initialize the RSS view."""
    self.url = url
    self.requires_auth = requires_auth
    self._title = title
    self._items = items
'Generate the RSS view XML.'
@asyncio.coroutine def get(self, request, entity_id=None):
response = '<?xml version="1.0" encoding="utf-8"?>\n\n' response += '<rss>\n' if (self._title is not None): response += (' <title>%s</title>\n' % escape(self._title.async_render())) for item in self._items: response += ' <item>\n' if ('title' in item): ...
def __init__(self, template, hass=None):
    """Instantiate a template.

    Raises TypeError when template is not a string.
    """
    if not isinstance(template, str):
        raise TypeError('Expected template to be a string')
    self.template = template
    # Compilation is deferred until ensure_valid()/_ensure_compiled().
    self._compiled_code = None
    self._compiled = None
    self.hass = hass
def ensure_valid(self):
    """Return if template is valid; raise TemplateError otherwise."""
    if self._compiled_code is not None:
        # Already compiled once; nothing to validate.
        return
    try:
        self._compiled_code = ENV.compile(self.template)
    except jinja2.exceptions.TemplateSyntaxError as err:
        raise TemplateError(err)
def extract_entities(self):
    """Extract all entities for a state_changed listener.

    Delegates to the module-level extract_entities helper.
    """
    return extract_entities(self.template)
def render(self, variables=None, **kwargs):
    """Render given template from outside the event loop (blocking)."""
    if variables is not None:
        kwargs.update(variables)
    future = run_callback_threadsafe(self.hass.loop, self.async_render, kwargs)
    return future.result()
def async_render(self, variables=None, **kwargs):
    """Render given template.

    This method must be run in the event loop.
    """
    self._ensure_compiled()
    if variables is not None:
        kwargs.update(variables)
    try:
        return self._compiled.render(kwargs).strip()
    except jinja2.TemplateError as err:
        raise TemplateError(err)
def render_with_possible_json_value(self, value, error_value=_SENTINEL):
    """Render template with value exposed (blocking wrapper).

    If value is valid JSON, value_json is exposed as well.
    """
    return run_callback_threadsafe(
        self.hass.loop,
        self.async_render_with_possible_json_value,
        value,
        error_value,
    ).result()
'Render template with value exposed. If valid JSON will expose value_json too. This method must be run in the event loop.'
def async_render_with_possible_json_value(self, value, error_value=_SENTINEL):
self._ensure_compiled() variables = {'value': value} try: variables['value_json'] = json.loads(value) except ValueError: pass try: return self._compiled.render(variables).strip() except jinja2.TemplateError as ex: _LOGGER.error('Error parsing value: %s ...
'Bind a template to a specific hass instance.'
def _ensure_compiled(self):
if (self._compiled is not None): return self.ensure_valid() assert (self.hass is not None), 'hass variable not set on template' location_methods = LocationMethods(self.hass) global_vars = ENV.make_globals({'closest': location_methods.closest, 'distance': location_methods.dista...
def __eq__(self, other):
    """Compare template with another."""
    if self.__class__ != other.__class__:
        return False
    return self.template == other.template and self.hass == other.hass
def __init__(self, hass):
    """Initialize all states."""
    self._hass = hass
def __getattr__(self, name):
    """Return the domain state (attribute access yields a DomainStates)."""
    return DomainStates(self._hass, name)
def __iter__(self):
    """Return all states, sorted by entity_id."""
    all_states = self._hass.states.async_all()
    return iter(sorted(all_states, key=lambda state: state.entity_id))
def __call__(self, entity_id):
    """Return the state string for entity_id, or STATE_UNKNOWN if missing."""
    state = self._hass.states.get(entity_id)
    if state is None:
        return STATE_UNKNOWN
    return state.state
def __init__(self, hass, domain):
    """Initialize the domain states."""
    self._hass = hass
    self._domain = domain
def __getattr__(self, name):
    """Return the state for this domain's entity `name`."""
    entity_id = '{}.{}'.format(self._domain, name)
    return self._hass.states.get(entity_id)
def __iter__(self):
    """Return the iteration over all the states of this domain."""
    in_domain = (
        state
        for state in self._hass.states.async_all()
        if state.domain == self._domain
    )
    return iter(sorted(in_domain, key=lambda state: state.entity_id))
def __init__(self, hass):
    """Initialize the distance helpers."""
    self._hass = hass
'Find closest entity. Closest to home: closest(states) closest(states.device_tracker) closest(\'group.children\') closest(states.group.children) Closest to a point: closest(23.456, 23.456, \'group.children\') closest(\'zone.school\', \'group.children\') closest(states.zone.school, \'group.children\')'
def closest(self, *args):
if (len(args) == 1): latitude = self._hass.config.latitude longitude = self._hass.config.longitude entities = args[0] elif (len(args) == 2): point_state = self._resolve_state(args[0]) if (point_state is None): _LOGGER.warning('Closest:Unable to find s...
'Calculate distance. Will calculate distance from home to a point or between points. Points can be passed in using state objects or lat/lng coordinates.'
def distance(self, *args):
locations = [] to_process = list(args) while to_process: value = to_process.pop(0) if isinstance(value, State): latitude = value.attributes.get(ATTR_LATITUDE) longitude = value.attributes.get(ATTR_LONGITUDE) if ((latitude is None) or (longitude is None)): ...
def _resolve_state(self, entity_id_or_state):
    """Resolve the argument to a State object, or None if not found."""
    if isinstance(entity_id_or_state, State):
        return entity_id_or_state
    if isinstance(entity_id_or_state, str):
        return self._hass.states.get(entity_id_or_state)
    return None
def is_safe_callable(self, obj):
    """Test if callback is safe."""
    if isinstance(obj, AllStates):
        return True
    return super().is_safe_callable(obj)
def __init__(self, exact=None, domain=None, glob=None):
    """Initialize an EntityConfigDict."""
    self._cache = {}
    self._exact = exact
    self._domain = domain
    if glob is None:
        self._glob = None
    else:
        # Pre-compile glob patterns to regexes, preserving insertion order.
        self._glob = OrderedDict(
            (re.compile(fnmatch.translate(pattern)), config)
            for pattern, config in glob.items()
        )
'Get config for an entity id.'
def get(self, entity_id):
if (entity_id in self._cache): return self._cache[entity_id] (domain, _) = split_entity_id(entity_id) result = self._cache[entity_id] = {} if ((self._domain is not None) and (domain in self._domain)): result.update(self._domain[domain]) if (self._glob is not None): for (patte...
def write_csv(self):
    """Write the current time/sample data to a timestamped CSV file.

    Fix: the original used a Python 2 print statement (`print 'Writing csv'`),
    a SyntaxError under Python 3; converted to the print() function.
    """
    fn = 'Webcam-pulse' + str(datetime.datetime.now())
    # Colons/dots are unsafe in filenames on some platforms.
    fn = fn.replace(':', '_').replace('.', '_')
    data = np.vstack((self.processor.times, self.processor.samples)).T
    np.savetxt(fn + '.csv', data, delimiter=',')
    print('Writing csv')
def toggle_search(self):
    """Toggle a motion lock on the processor's face detection component.

    Locking the forehead location in place significantly improves data
    quality once a forehead has been successfully isolated.

    Fix: the original Python 2-style `print (a, b)` prints a tuple literal
    under Python 3; converted to a proper print() call.
    """
    state = self.processor.find_faces_toggle()
    print("face detection lock =", not state)
'Toggles the data display.'
def toggle_display_plot(self):
if self.bpm_plot: print 'bpm plot disabled' self.bpm_plot = False destroyWindow(self.plot_title) else: print 'bpm plot enabled' if self.processor.find_faces: self.toggle_search() self.bpm_plot = True self.make_bpm_plot() mov...
def make_bpm_plot(self):
    """Create and/or update the data display."""
    proc = self.processor
    # Two stacked series: raw samples over time, and the FFT spectrum.
    series = [
        [proc.times, proc.samples],
        [proc.freqs, proc.fft],
    ]
    plotXY(
        series,
        labels=[False, True],
        showmax=[False, 'bpm'],
        label_ndigits=[0, 0],
        showmax_digits=[0, 1],
        skip=[3, 3],
        name=self.plot_title,
        bg=proc.slices[0],
    )
'Handle keystrokes, as set at the bottom of __init__() A plotting or camera frame window must have focus for keypresses to be detected.'
def key_handler(self):
self.pressed = (waitKey(10) & 255) if (self.pressed == 27): print 'Exiting' for cam in self.cameras: cam.cam.release() if self.send_serial: self.serial.close() sys.exit() for key in self.key_controls.keys(): if (chr(self.pressed) == key): ...
'Single iteration of the application\'s main loop.'
def main_loop(self):
frame = self.cameras[self.selected_cam].get_frame() (self.h, self.w, _c) = frame.shape self.processor.frame_in = frame self.processor.run(self.selected_cam) output_frame = self.processor.frame_out imshow('Processed', output_frame) if self.bpm_plot: self.make_bpm_plot() if self.se...
'Assert that the TPOT driver stores correct default values for all parameters.'
def test_default_param(self):
args = self.parser.parse_args(['tests.csv']) self.assertEqual(args.CROSSOVER_RATE, 0.1) self.assertEqual(args.DISABLE_UPDATE_CHECK, False) self.assertEqual(args.GENERATIONS, 100) self.assertEqual(args.INPUT_FILE, 'tests.csv') self.assertEqual(args.INPUT_SEPARATOR, ' DCTB ') self.assertEqual(...
'Assert that _print_args prints correct values for all parameters.'
def test_print_args(self):
args = self.parser.parse_args(['tests.csv']) with captured_output() as (out, err): _print_args(args) output = out.getvalue() expected_output = '\nTPOT settings:\nCONFIG_FILE DCTB = DCTB None\nCROSSOVER_RATE DCTB = DCTB 0.1\nGENERATIONS DCTB = DCTB 100\nINPUT_FILE DCTB = DCTB tests.csv\nINPUT_...
@property
def __name__(self):
    """Instance name is the same as the class name."""
    return type(self).__name__
def fit(self, X, y=None):
    """Dummy fit to satisfy the sklearn API; returns self unchanged."""
    return self
'Transform data by adding two virtual features. Parameters X: numpy ndarray, {n_samples, n_components} New data, where n_samples is the number of samples and n_components is the number of components. y: None Unused Returns X_transformed: array-like, shape (n_samples, n_features) The transformed feature set'
def transform(self, X, y=None):
X = check_array(X) n_features = X.shape[1] X_transformed = np.copy(X) non_zero_vector = np.count_nonzero(X_transformed, axis=1) non_zero = np.reshape(non_zero_vector, ((-1), 1)) zero_col = np.reshape((n_features - non_zero_vector), ((-1), 1)) X_transformed = np.hstack((non_zero, X_transforme...
def __init__(self, estimator):
    """Create a StackingEstimator object.

    estimator must expose fit, predict, and (optionally) predict_proba;
    it is the source of the synthetic features.
    """
    self.estimator = estimator
def fit(self, X, y=None, **fit_params):
    """Fit the wrapped estimator and return self for chaining."""
    self.estimator.fit(X, y, **fit_params)
    return self
'Transform data by adding two synthetic feature(s). Parameters X: numpy ndarray, {n_samples, n_components} New data, where n_samples is the number of samples and n_components is the number of components. Returns X_transformed: array-like, shape (n_samples, n_features + 1) or (n_samples, n_features + 1 + n_classes) for ...
def transform(self, X):
X = check_array(X) X_transformed = np.copy(X) if (issubclass(self.estimator.__class__, ClassifierMixin) and hasattr(self.estimator, 'predict_proba')): X_transformed = np.hstack((self.estimator.predict_proba(X), X)) X_transformed = np.hstack((np.reshape(self.estimator.predict(X), ((-1), 1)), X_tr...
'Set up the genetic programming algorithm for pipeline optimization. Parameters generations: int, optional (default: 100) Number of iterations to the run pipeline optimization process. Generally, TPOT will work better when you give it more generations (and therefore time) to optimize the pipeline. TPOT will evaluate PO...
def __init__(self, generations=100, population_size=100, offspring_size=None, mutation_rate=0.9, crossover_rate=0.1, scoring=None, cv=5, subsample=1.0, n_jobs=1, max_time_mins=None, max_eval_time_mins=5, random_state=None, config_dict=None, warm_start=False, verbosity=0, disable_update_check=False):
if (self.__class__.__name__ == 'TPOTBase'): raise RuntimeError('Do not instantiate the TPOTBase class directly; use TPOTRegressor or TPOTClassifier instead.') self.disable_update_check = disable_update_check if (not self.disable_update_check): update_check('t...
'Fit an optimized machine learning pipeline. Uses genetic programming to optimize a machine learning pipeline that maximizes score on the provided features and target. Performs internal k-fold cross-validaton to avoid overfitting on the training data. Parameters features: array-like {n_samples, n_features} Feature matr...
def fit(self, features, target, sample_weight=None, groups=None):
features = features.astype(np.float64) self._fitted_imputer = None if np.any(np.isnan(features)): features = self._impute_values(features) self._check_dataset(features, target) if (self.subsample < 1.0): (features, _, target, _) = train_test_split(features, target, train_size=self.su...
'Helper function to update the _optimized_pipeline field.'
def _update_top_pipeline(self):
if self._pareto_front: top_score = (- float('inf')) for (pipeline, pipeline_scores) in zip(self._pareto_front.items, reversed(self._pareto_front.keys)): if (pipeline_scores.wvalues[1] > top_score): self._optimized_pipeline = pipeline top_score = pipeline_s...
'Use the optimized pipeline to predict the target for a feature set. Parameters features: array-like {n_samples, n_features} Feature matrix Returns array-like: {n_samples} Predicted target for the samples in the feature matrix'
def predict(self, features):
if (not self.fitted_pipeline_): raise RuntimeError('A pipeline has not yet been optimized. Please call fit() first.') features = features.astype(np.float64) if np.any(np.isnan(features)): features = self._impute_values(features) return self.fitted_pipeline_....
def fit_predict(self, features, target):
    """Call fit and predict in sequence; return the predictions."""
    self.fit(features, target)
    return self.predict(features)
'Returns the score on the given testing data using the user-specified scoring function. Parameters testing_features: array-like {n_samples, n_features} Feature matrix of the testing set testing_target: array-like {n_samples} List of class labels for prediction in the testing set Returns accuracy_score: float The estima...
def score(self, testing_features, testing_target):
if (self.fitted_pipeline_ is None): raise RuntimeError('A pipeline has not yet been optimized. Please call fit() first.') score = SCORERS[self.scoring_function](self.fitted_pipeline_, testing_features.astype(np.float64), testing_target.astype(np.float64)) return abs(sco...
'Use the optimized pipeline to estimate the class probabilities for a feature set. Parameters features: array-like {n_samples, n_features} Feature matrix of the testing set Returns array-like: {n_samples, n_target} The class probabilities of the input samples'
def predict_proba(self, features):
if (not self.fitted_pipeline_): raise RuntimeError('A pipeline has not yet been optimized. Please call fit() first.') else: if (not hasattr(self.fitted_pipeline_, 'predict_proba')): raise RuntimeError('The fitted pipeline does not have ...
def set_params(self, **params):
    """Set the parameters of TPOT by re-running __init__; returns self."""
    self.__init__(**params)
    return self
def export(self, output_file_name):
    """Export the optimized pipeline as Python code to the given path.

    Raises RuntimeError when fit() has not produced a pipeline yet.
    """
    if self._optimized_pipeline is None:
        raise RuntimeError('A pipeline has not yet been optimized. Please call fit() first.')
    code = export_pipeline(self._optimized_pipeline, self.operators, self._pset)
    with open(output_file_name, 'w') as output_file:
        output_file.write(code)
def _impute_values(self, features):
    """Impute missing values in a feature set.

    Lazily fits a median Imputer on first use and caches it for reuse.
    """
    if self.verbosity > 1:
        print('Imputing missing values in feature set')
    if self._fitted_imputer is None:
        imputer = Imputer(strategy='median', axis=1)
        imputer.fit(features)
        self._fitted_imputer = imputer
    return self._fitted_imputer.transform(features)
'Check if a dataset has a valid feature set and labels. Parameters features: array-like {n_samples, n_features} Feature matrix target: array-like {n_samples} List of class labels for prediction Returns None'
def _check_dataset(self, features, target):
try: check_X_y(features, target, accept_sparse=False) except (AssertionError, ValueError): raise ValueError('Error: Input data is not in a valid format. Please confirm that the input data is scikit-learn compatible. For example, the ...
def _compile_to_sklearn(self, expr):
    """Compile a DEAP pipeline expression into an sklearn Pipeline."""
    tree = expr_to_tree(expr, self._pset)
    code = generate_pipeline_code(tree, self.operators)
    # NOTE(review): eval() runs generated pipeline code; inputs are
    # TPOT-internal, not untrusted user data — do not feed external strings.
    return eval(code, self.operators_context)
'Recursively iterate through all objects in the pipeline and set a given parameter. Parameters pipeline_steps: array-like List of (str, obj) tuples from a scikit-learn pipeline or related object parameter: str The parameter to assign a value for in each pipeline object value: any The value to assign the parameter to in...
def _set_param_recursive(self, pipeline_steps, parameter, value):
for (_, obj) in pipeline_steps: recursive_attrs = ['steps', 'transformer_list', 'estimators'] for attr in recursive_attrs: if hasattr(obj, attr): self._set_param_recursive(getattr(obj, attr), parameter, value) break else: if hasattr(obj...
'Determine the fit of the provided individuals. Parameters individuals: a list of DEAP individual One individual is a list of pipeline operators and model parameters that can be compiled by DEAP into a callable function features: numpy.ndarray {n_samples, n_features} A numpy matrix containing the training and testing f...
def _evaluate_individuals(self, individuals, features, target, sample_weight=None, groups=None):
if self.max_time_mins: total_mins_elapsed = ((datetime.now() - self._start_datetime).total_seconds() / 60.0) if (total_mins_elapsed >= self.max_time_mins): raise KeyboardInterrupt('{} minutes have elapsed. TPOT will close down.'.format(total_mins_elapsed)) (_, un...
@_pre_test
def _random_mutation_operator(self, individual):
    """Apply one randomly chosen mutation (insert, replace, or shrink)."""
    techniques = [
        partial(gp.mutInsert, pset=self._pset),
        partial(mutNodeReplacement, pset=self._pset),
        partial(gp.mutShrink),
    ]
    chosen = np.random.choice(techniques)
    return chosen(individual)
def _gen_grow_safe(self, pset, min_, max_, type_=None):
    """Generate an expression whose leaves may have differing depths
    between min_ and max_."""
    def condition(height, depth, type_):
        """Stop when depth reaches height or the node must be a terminal."""
        return type_ not in [np.ndarray, Output_Array] or depth == height
    return self._generate(pset, min_, max_, condition, type_)
'Generate a Tree as a list of lists. The tree is build from the root to the leaves, and it stop growing when the condition is fulfilled. Parameters pset: PrimitiveSetTyped Primitive set from which primitives are selected. min_: int Minimum height of the produced trees. max_: int Maximum Height of the produced trees. co...
@_pre_test def _generate(self, pset, min_, max_, condition, type_=None):
if (type_ is None): type_ = pset.ret expr = [] height = np.random.randint(min_, max_) stack = [(0, type_)] while (len(stack) != 0): (depth, type_) = stack.pop() if condition(height, depth, type_): try: term = np.random.choice(pset.terminals[type_])...
'Reads in the bot configuration file and sets up the bot. Defaults to config.txt if no configuration file is specified. If you want to modify the bot configuration, edit your config.txt.'
def bot_setup(self, config_file='config.txt'):
with open(config_file, 'r') as in_file: for line in in_file: line = line.split(':') parameter = line[0].strip() value = line[1].strip() if (parameter in ['USERS_KEEP_FOLLOWING', 'USERS_KEEP_UNMUTED', 'USERS_KEEP_MUTED']): if (value != ''): ...
'Syncs the user\'s followers and follows locally so it isn\'t necessary to repeatedly look them up via the Twitter API. It is important to run this method at least daily so the bot is working with a relatively up-to-date version of the user\'s follows. Do not run this method too often, however, or it will quickly cause...
def sync_follows(self):
followers_status = self.TWITTER_CONNECTION.followers.ids(screen_name=self.BOT_CONFIG['TWITTER_HANDLE']) followers = set(followers_status['ids']) next_cursor = followers_status['next_cursor'] with open(self.BOT_CONFIG['FOLLOWERS_FILE'], 'w') as out_file: for follower in followers: out...
def get_do_not_follow_list(self):
    """Return the set of user ids the bot has already followed before."""
    with open(self.BOT_CONFIG['ALREADY_FOLLOWED_FILE'], 'r') as in_file:
        return {int(line) for line in in_file}
def get_followers_list(self):
    """Return the set of user ids currently following the user."""
    with open(self.BOT_CONFIG['FOLLOWERS_FILE'], 'r') as in_file:
        return set(map(int, in_file))
def get_follows_list(self):
    """Return the set of user ids that the user is currently following."""
    with open(self.BOT_CONFIG['FOLLOWS_FILE'], 'r') as in_file:
        return {int(line) for line in in_file}
def search_tweets(self, phrase, count=100, result_type='recent'):
    """Return tweets matching a phrase (hashtag, word, etc.)."""
    return self.TWITTER_CONNECTION.search.tweets(
        q=phrase, result_type=result_type, count=count
    )
'Favorites tweets that match a phrase (hashtag, word, etc.).'
def auto_fav(self, phrase, count=100, result_type='recent'):
result = self.search_tweets(phrase, count, result_type) for tweet in result['statuses']: try: if (tweet['user']['screen_name'] == self.BOT_CONFIG['TWITTER_HANDLE']): continue self.wait_on_action() result = self.TWITTER_CONNECTION.favorites.create(_id=t...
'Retweets tweets that match a phrase (hashtag, word, etc.).'
def auto_rt(self, phrase, count=100, result_type='recent'):
result = self.search_tweets(phrase, count, result_type) for tweet in result['statuses']: try: if (tweet['user']['screen_name'] == self.BOT_CONFIG['TWITTER_HANDLE']): continue self.wait_on_action() result = self.TWITTER_CONNECTION.statuses.retweet(id=tw...
'Follows anyone who tweets about a phrase (hashtag, word, etc.).'
def auto_follow(self, phrase, count=100, result_type='recent'):
result = self.search_tweets(phrase, count, result_type) following = self.get_follows_list() do_not_follow = self.get_do_not_follow_list() for tweet in result['statuses']: try: if ((tweet['user']['screen_name'] != self.BOT_CONFIG['TWITTER_HANDLE']) and (tweet['user']['id'] not in foll...
'Follows back everyone who\'s followed you.'
def auto_follow_followers(self, count=None):
following = self.get_follows_list() followers = self.get_followers_list() not_following_back = (followers - following) not_following_back = list(not_following_back)[:count] for user_id in not_following_back: try: self.wait_on_action() self.TWITTER_CONNECTION.friendshi...
'Follows the followers of a specified user.'
def auto_follow_followers_of_user(self, user_twitter_handle, count=100):
following = self.get_follows_list() followers_of_user = set(self.TWITTER_CONNECTION.followers.ids(screen_name=user_twitter_handle)['ids'][:count]) do_not_follow = self.get_do_not_follow_list() for user_id in followers_of_user: try: if ((user_id not in following) and (user_id not in d...
'Unfollows everyone who hasn\'t followed you back.'
def auto_unfollow_nonfollowers(self, count=None):
following = self.get_follows_list() followers = self.get_followers_list() not_following_back = (following - followers) not_following_back = list(not_following_back)[:count] already_followed = set(not_following_back) already_followed_list = [] with open(self.BOT_CONFIG['ALREADY_FOLLOWED_FILE'...
def auto_unfollow_all_followers(self, count=None):
    """Unfollow everyone you follow, except users configured to keep.

    NOTE: `count` is accepted but unused, as in the original interface.
    """
    keep = self.BOT_CONFIG['USERS_KEEP_FOLLOWING']
    for user_id in self.get_follows_list():
        if user_id in keep:
            continue
        self.wait_on_action()
        self.TWITTER_CONNECTION.friendships.destroy(user_id=user_id)
        print(('Unfollowed %d' % user_id), file=sys.stdout)
'Mutes everyone that you are following.'
def auto_mute_following(self):
following = self.get_follows_list() muted = set(self.TWITTER_CONNECTION.mutes.users.ids(screen_name=self.BOT_CONFIG['TWITTER_HANDLE'])['ids']) not_muted = (following - muted) for user_id in not_muted: if (user_id not in self.BOT_CONFIG['USERS_KEEP_UNMUTED']): self.TWITTER_CONNECTION....
'Unmutes everyone that you have muted.'
def auto_unmute(self):
muted = set(self.TWITTER_CONNECTION.mutes.users.ids(screen_name=self.BOT_CONFIG['TWITTER_HANDLE'])['ids']) for user_id in muted: if (user_id not in self.BOT_CONFIG['USERS_KEEP_MUTED']): self.TWITTER_CONNECTION.mutes.users.destroy(user_id=user_id) print(('Unmuted %d' % user_id)...
def send_tweet(self, message):
    """Post a tweet with the given message text."""
    return self.TWITTER_CONNECTION.statuses.update(status=message)
'Add users to list slug that are tweeting phrase.'
def auto_add_to_list(self, phrase, list_slug, count=100, result_type='recent'):
result = self.search_tweets(phrase, count, result_type) for tweet in result['statuses']: try: if (tweet['user']['screen_name'] == self.BOT_CONFIG['TWITTER_HANDLE']): continue result = self.TWITTER_CONNECTION.lists.members.create(owner_screen_name=self.BOT_CONFIG['...
def filter_valid_block_ids(self, block_ids, include_undecided=False):
    """Given block ids, return only the ones that are valid.

    Blocks without a vote count as valid only when include_undecided is True.
    """
    unique_ids = list(set(block_ids))
    raw_votes = query.get_votes_for_blocks_by_voter(self.connection, unique_ids, self.me)
    validity = {
        vote['vote']['voting_for_block']: vote['vote']['is_block_valid']
        for vote in raw_votes
    }
    return [bid for bid in unique_ids if validity.get(bid, include_undecided)]
def filter_valid_items(self, items, block_id_key=(lambda b: b[0])):
    """Given items with block ids, return the valid or undecided ones."""
    items = list(items)
    ids = [block_id_key(item) for item in items]
    valid = set(self.filter_valid_block_ids(ids, True))
    return [item for item in items if block_id_key(item) in valid]
def get_outputs_by_public_key(self, public_key):
    """Get output links owned by a public key, restricted to valid blocks."""
    owned = list(query.get_owned_ids(self.connection, public_key))
    valid_txs = [tx for (_, tx) in self.filter_valid_items(owned)]
    return [
        TransactionLink(tx['id'], index)
        for tx in valid_txs
        for index, output in enumerate(tx['outputs'])
        if condition_details_has_owner(output['condition']['details'], public_key)
    ]
'Remove outputs that have been spent Args: outputs: list of TransactionLink'
def filter_spent_outputs(self, outputs):
links = [o.to_dict() for o in outputs] res = query.get_spending_transactions(self.connection, links) txs = [tx for (_, tx) in self.filter_valid_items(res)] spends = {TransactionLink.from_dict(input_['fulfills']) for tx in txs for input_ in tx['inputs']} return [ff for ff in outputs if (ff not in spe...
'Remove outputs that have not been spent Args: outputs: list of TransactionLink'
def filter_unspent_outputs(self, outputs):
links = [o.to_dict() for o in outputs] res = query.get_spending_transactions(self.connection, links) txs = [tx for (_, tx) in self.filter_valid_items(res)] spends = {TransactionLink.from_dict(input_['fulfills']) for tx in txs for input_ in tx['inputs']} return [ff for ff in outputs if (ff in spends)...
'Create an instance of an :class:`~.Input`. Args: fulfillment (:class:`cryptoconditions.Fulfillment`): A Fulfillment to be signed with a private key. owners_before (:obj:`list` of :obj:`str`): A list of owners after a Transaction was confirmed. fulfills (:class:`~bigchaindb.common.transaction. TransactionLink`, optiona...
def __init__(self, fulfillment, owners_before, fulfills=None):
if ((fulfills is not None) and (not isinstance(fulfills, TransactionLink))): raise TypeError('`fulfills` must be a TransactionLink instance') if (not isinstance(owners_before, list)): raise TypeError('`owners_after` must be a list instance') self.fulfillment = f...
'Transforms the object to a Python dictionary. Note: If an Input hasn\'t been signed yet, this method returns a dictionary representation. Returns: dict: The Input as an alternative serialization format.'
def to_dict(self):
try: fulfillment = self.fulfillment.serialize_uri() except (TypeError, AttributeError, ASN1EncodeError): fulfillment = _fulfillment_to_details(self.fulfillment) try: fulfills = self.fulfills.to_dict() except AttributeError: fulfills = None input_ = {'owners_before': s...
'Transforms a Python dictionary to an Input object. Note: Optionally, this method can also serialize a Cryptoconditions- Fulfillment that is not yet signed. Args: data (dict): The Input to be transformed. Returns: :class:`~bigchaindb.common.transaction.Input` Raises: InvalidSignature: If an Input\'s URI couldn\'t be pa...
@classmethod def from_dict(cls, data):
fulfillment = data['fulfillment'] if (not isinstance(fulfillment, Fulfillment)): try: fulfillment = Fulfillment.from_uri(data['fulfillment']) except ASN1DecodeError: raise InvalidSignature("Fulfillment URI couldn't been parsed") except TypeError: ...
def __init__(self, txid=None, output=None):
    """Create an instance of a TransactionLink.

    Both fields default to None, representing an empty link.
    """
    self.txid = txid
    self.output = output
@classmethod
def from_dict(cls, link):
    """Build a TransactionLink from a dict.

    A non-subscriptable link (e.g. None) yields an empty link.
    """
    try:
        return cls(link['transaction_id'], link['output_index'])
    except TypeError:
        return cls()
def to_dict(self):
    """Serialize the link to a dict, or None for an empty link."""
    if self.txid is None and self.output is None:
        return None
    return {'transaction_id': self.txid, 'output_index': self.output}
'Create an instance of a :class:`~.Output`. Args: fulfillment (:class:`cryptoconditions.Fulfillment`): A Fulfillment to extract a Condition from. public_keys (:obj:`list` of :obj:`str`, optional): A list of owners before a Transaction was confirmed. amount (int): The amount of Assets to be locked with this Output. Rais...
def __init__(self, fulfillment, public_keys=None, amount=1):
if ((not isinstance(public_keys, list)) and (public_keys is not None)): raise TypeError('`public_keys` must be a list instance or None') if (not isinstance(amount, int)): raise TypeError('`amount` must be an int') if (amount < 1): raise AmountError('`...
'Transforms the object to a Python dictionary. Note: A dictionary serialization of the Input the Output was derived from is always provided. Returns: dict: The Output as an alternative serialization format.'
def to_dict(self):
condition = {} try: condition['details'] = _fulfillment_to_details(self.fulfillment) except AttributeError: pass try: condition['uri'] = self.fulfillment.condition_uri except AttributeError: condition['uri'] = self.fulfillment output = {'public_keys': self.public_...
'Generates a Output from a specifically formed tuple or list. Note: If a ThresholdCondition has to be generated where the threshold is always the number of subconditions it is split between, a list of the following structure is sufficient: [(address|condition)*, [(address|condition)*, ...], ...] Args: public_keys (:obj...
@classmethod def generate(cls, public_keys, amount):
threshold = len(public_keys) if (not isinstance(amount, int)): raise TypeError('`amount` must be a int') if (amount < 1): raise AmountError('`amount` needs to be greater than zero') if (not isinstance(public_keys, list)): raise TypeError('`public_key...
'Generates ThresholdSha256 conditions from a list of new owners. Note: This method is intended only to be used with a reduce function. For a description on how to use this method, see :meth:`~.Output.generate`. Args: initial (:class:`cryptoconditions.ThresholdSha256`): A Condition representing the overall root. new_pub...
@classmethod def _gen_condition(cls, initial, new_public_keys):
try: threshold = len(new_public_keys) except TypeError: threshold = None if (isinstance(new_public_keys, list) and (len(new_public_keys) > 1)): ffill = ThresholdSha256(threshold=threshold) reduce(cls._gen_condition, new_public_keys, ffill) elif (isinstance(new_public_keys...
'Transforms a Python dictionary to an Output object. Note: To pass a serialization cycle multiple times, a Cryptoconditions Fulfillment needs to be present in the passed-in dictionary, as Condition URIs are not serializable anymore. Args: data (dict): The dict to be transformed. Returns: :class:`~bigchaindb.common.tran...
@classmethod def from_dict(cls, data):
try: fulfillment = _fulfillment_from_details(data['condition']['details']) except KeyError: fulfillment = data['condition']['uri'] try: amount = int(data['amount']) except ValueError: raise AmountError(('Invalid amount: %s' % data['amount'])) return cls(fulfillm...
'The constructor allows to create a customizable Transaction. Note: When no `version` is provided, one is being generated by this method. Args: operation (str): Defines the operation of the Transaction. asset (dict): Asset payload for this Transaction. inputs (:obj:`list` of :class:`~bigchaindb.common. transaction.Inpu...
def __init__(self, operation, asset, inputs=None, outputs=None, metadata=None, version=None):
if (operation not in Transaction.ALLOWED_OPERATIONS): allowed_ops = ', '.join(self.__class__.ALLOWED_OPERATIONS) raise ValueError('`operation` must be one of {}'.format(allowed_ops)) if ((operation in [Transaction.CREATE, Transaction.GENESIS]) and (asset is not None) and (not (...
'A simple way to generate a `CREATE` transaction. Note: This method currently supports the following Cryptoconditions use cases: - Ed25519 - ThresholdSha256 Additionally, it provides support for the following BigchainDB use cases: - Multiple inputs and outputs. Args: tx_signers (:obj:`list` of :obj:`str`): A list of ke...
@classmethod def create(cls, tx_signers, recipients, metadata=None, asset=None):
if (not isinstance(tx_signers, list)): raise TypeError('`tx_signers` must be a list instance') if (not isinstance(recipients, list)): raise TypeError('`recipients` must be a list instance') if (len(tx_signers) == 0): raise ValueError('`tx_signers` lis...
'A simple way to generate a `TRANSFER` transaction. Note: Different cases for threshold conditions: Combining multiple `inputs` with an arbitrary number of `recipients` can yield interesting cases for the creation of threshold conditions we\'d like to support. The following notation is proposed: 1. The index of a `reci...
@classmethod def transfer(cls, inputs, recipients, asset_id, metadata=None):
if (not isinstance(inputs, list)): raise TypeError('`inputs` must be a list instance') if (len(inputs) == 0): raise ValueError('`inputs` must contain at least one item') if (not isinstance(recipients, list)): raise TypeError('`recipients` must b...
def to_inputs(self, indices=None):
    """Derive spendable Inputs from this Transaction's outputs.

    Note:
        The derived Inputs can be passed to ``Transaction.transfer``
        as its ``inputs`` argument.

    Args:
        indices (:obj:`list` of int, optional): Output indices to
            convert. A falsy value (``None`` or an empty list) selects
            every output.

    Returns:
        :obj:`list` of :class:`~bigchaindb.common.transaction.Input`
    """
    if not indices:
        # No explicit selection: convert every output.
        indices = range(len(self.outputs))
    inputs = []
    for idx in indices:
        out = self.outputs[idx]
        inputs.append(
            Input(out.fulfillment, out.public_keys,
                  TransactionLink(self.id, idx)))
    return inputs
def add_input(self, input_):
    """Append an Input to this Transaction's list of inputs.

    Args:
        input_ (:class:`~bigchaindb.common.transaction.Input`): The
            Input to be added to the Transaction.

    Raises:
        TypeError: If ``input_`` is not an Input instance.
    """
    if isinstance(input_, Input):
        self.inputs.append(input_)
    else:
        raise TypeError('`input_` must be a Input instance')