code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
|---|---|---|---|---|---|
# Copyright (c) 2012 Martin Garcia <newluxfero@gmail.com>
#
# This file is part of python-producteev, and is made available under
# MIT license. See LICENSE for the full details.
import re
import htmlentitydefs
def unescape(text):
    """
    Remove HTML or XML character references and entities from a text string.

    text -- The HTML (or XML) source text.
    return -- The plain text, as a Unicode string, if necessary.

    Unrecognised references (unknown entity names, malformed numeric
    references) are left untouched.
    """
    # html.entities is the Python 3 name of the old Python 2
    # htmlentitydefs module; the original unichr/htmlentitydefs code
    # raised NameError/ImportError on Python 3.
    from html.entities import name2codepoint

    def fixup(m):
        ref = m.group(0)
        if ref[:2] == "&#":
            # Numeric character reference (decimal or hexadecimal).
            try:
                if ref[:3] == "&#x":
                    return chr(int(ref[3:-1], 16))
                else:
                    return chr(int(ref[2:-1]))
            except ValueError:
                pass
        else:
            # Named entity, e.g. "&amp;".
            try:
                ref = chr(name2codepoint[ref[1:-1]])
            except KeyError:
                pass
        return ref  # leave as is

    # Raw string avoids the invalid "\w" escape warning of the original.
    return re.sub(r"&#?\w+;", fixup, text)
|
magarcia/python-producteev
|
producteev/utils.py
|
Python
|
mit
| 1,036
|
import unittest
from charlesbot.slack.slack_channel_joined import SlackChannelJoined
from charlesbot.slack.slack_channel_left import SlackChannelLeft
from charlesbot.slack.slack_group_joined import SlackGroupJoined
from charlesbot.slack.slack_group_left import SlackGroupLeft
from charlesbot.slack.slack_message import SlackMessage
class TestSlackBaseObjectChildren(unittest.TestCase):
    """Verify that each Slack object subclass accepts its own event type."""

    def _assert_compatible(self, slack_object, type_name):
        # All subclasses share the same is_compatible() contract: a dict
        # whose "type" matches the object's kind must be accepted.
        self.assertTrue(slack_object.is_compatible({"type": type_name}))

    def test_slack_channel_joined_compatibility(self):
        self._assert_compatible(SlackChannelJoined(), "channel_joined")

    def test_slack_channel_left_compatibility(self):
        self._assert_compatible(SlackChannelLeft(), "channel_left")

    def test_slack_group_joined_compatibility(self):
        self._assert_compatible(SlackGroupJoined(), "group_joined")

    def test_slack_group_left_compatibility(self):
        self._assert_compatible(SlackGroupLeft(), "group_left")

    def test_slack_message_compatibility(self):
        self._assert_compatible(SlackMessage(), "message")
|
marvinpinto/charlesbot
|
tests/slack/test_slack_base_object_children.py
|
Python
|
mit
| 1,314
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import distutils.dir_util
import os
import shutil
import sys
def touch_file(file_path):
    """
    Create a new empty file at file_path (like the Unix `touch` command),
    creating missing parent directories first.
    """
    parent_dir = os.path.dirname(os.path.abspath(file_path))
    if not os.path.isdir(parent_dir):
        os.makedirs(parent_dir)
    # Append mode creates the file without truncating an existing one;
    # utime then refreshes the access/modification timestamps.
    with codecs.open(file_path, 'a'):
        os.utime(file_path, None)
def copy_file(input_file, output_file, overwrite=False):
    """
    Copy input_file to output_file, honouring an overwrite flag.

    When the destination already exists and overwrite is False the whole
    program is aborted via sys.exit().
    """
    destination_exists = os.path.isfile(output_file)
    if destination_exists and not overwrite:
        sys.exit('File exists, unable to continue: %s' % output_file)
    if destination_exists:
        print('File exists, overwriting')
    shutil.copyfile(input_file, output_file)
def copy_tree(input_dir, output_dir, overwrite=False):
    """
    Helper function to copy a directory tree that adds an overwrite parameter.

    When the destination directory exists and overwrite is False the whole
    program is aborted via sys.exit(); with overwrite the source tree is
    merged into the existing destination.
    """
    # shutil.copytree(dirs_exist_ok=True) replaces the previously used
    # distutils.dir_util.copy_tree: distutils was deprecated by PEP 632
    # and removed from the standard library in Python 3.12.
    if os.path.isdir(output_dir):
        if overwrite:
            print('Directory exists, overwriting')
            shutil.copytree(input_dir, output_dir, dirs_exist_ok=True)
        else:
            sys.exit('Directory exists, unable to continue: %s' % output_dir)
    else:
        shutil.copytree(input_dir, output_dir, dirs_exist_ok=True)
def get_file_paths_from_directory(dir_path):
    """
    Walk a directory tree and return the paths of every file found in it,
    including files in all sub-directories.
    """
    return [
        os.path.join(root, file_name)
        for root, _dirs, file_names in os.walk(dir_path)
        for file_name in file_names
    ]
def clean_dsstore(dir_path):
    """
    Walk a directory and delete all the hidden macOS .DS_Store files in it.
    """
    for root, dirs, files in os.walk(dir_path):
        for f in files:
            if f == '.DS_Store':
                # `root` already starts with dir_path; the old
                # os.path.join(dir_path, root, f) prefixed the directory
                # twice whenever dir_path was a relative path, so the
                # remove targeted a non-existent location.
                os.remove(os.path.join(root, f))
|
geberl/droppy-workspace
|
Tasks/DropPy.Common/file_tools.py
|
Python
|
mit
| 1,999
|
#!/usr/bin/env python
#########################################################################################
# KLEE-Unit
# Author: Jacob Torrey
# Date: 3/15/2016
#
# Script to auto-generate test harness and execute symbolically with KLEE for a passed C
# file
#########################################################################################
import sys
import os
import shutil
import re
import subprocess
from glob import glob
from pycparser import c_parser, c_ast
from ctags import CTags, TagEntry
def collect_klee_runs():
    '''Navigates all KLEE output directories and performs basic triage.

    Scans klee-out-*/ under the current working directory, tallies the
    .err files by KLEE error class, and prints a summary.
    '''
    errs = glob('klee-out-*/*.err')
    # Tally keyed by the penultimate extension of the error file,
    # e.g. "test000001.ptr.err" counts as class 'ptr'.
    col = {'ptr': 0, 'free': 0, 'div': 0, 'abort': 0, 'assert': 0, 'user': 0, 'model': 0, 'exec': 0}
    for e in errs:
        e = re.sub(r'\.err', '', e)
        e = re.sub(r'^.*test.*[0-9]\.', '', e)
        col[e] += 1
    # print() calls replace the original Python 2 print statements, which
    # are a SyntaxError under Python 3; single-argument print("...") is
    # valid in both interpreters.
    print("Found " + str(len(errs)) + " errors in file")
    print(str(col))
def run_klee(filename, maxs=180):
    '''Runs KLEE on a given file'''
    # Symbolically execute `filename` with KLEE, capped at `maxs` seconds.
    command = [
        'klee',
        '--libc=uclibc',
        '--posix-runtime',
        '-max-time=' + str(maxs),
        filename,
    ]
    return subprocess.call(command)
def generate_c(filename, func):
    '''Generates a test harness and temp C file for a passed function.

    filename -- path to the C source file (must end in ".c")
    func     -- (name, [return_type, [param_type, ...]]) as produced by
                parse_pattern()
    Returns the path of the generated temporary C file.
    '''
    # Copy to duplicate safe to trash
    newfn = filename[:-2] + "_" + func[0] + '.c'
    shutil.copyfile(filename, newfn)
    # Rename main() to something not conflicting with our harness
    with open(newfn, 'r') as src:
        c = src.read()
    c = re.sub(r' main\s?\(', ' not_main(', c)
    c = c + "\r\n\r\n"
    # Generate our own main() using symbolic variables for every function argument
    main = "int main(int argc, char **argv) {\r\n"
    arg_names = []
    for i, arg_type in enumerate(func[1][1]):
        vn = 'var' + str(i)
        arg_names.append(vn)
        main += arg_type + " " + vn + ";\r\n"
        main += "klee_make_symbolic(&" + vn + ", sizeof(" + arg_type + "), \"" + vn + "\");\r\n"
    # Joining the collected names (instead of slicing off a trailing
    # ", ") also produces a correct call for zero-argument functions;
    # the old fc[:-2] mangled the function name in that case ("fo);").
    main += func[0] + "(" + ", ".join(arg_names) + ");\r\n"
    main += "return 0;\r\n}\r\n"
    c += main
    # Inject into temp file (with-statement guarantees the handle closes)
    with open(newfn, 'w') as dst:
        dst.write(c)
    # Return temp file name
    return newfn
def compile_c(filename, outname='kleeunit.bc'):
    '''Compiles for execution with KLEE'''
    # Emit LLVM bitcode (with debug info) that KLEE can interpret.
    clang_args = ['clang', '-g', '-emit-llvm', '-c', filename, '-o', outname]
    ret = subprocess.call(clang_args)
    # The temporary C file is intentionally left on disk for debugging.
    #os.remove(filename)
    return ret
def run_ctags(filename):
    '''Executes the ctags command on the passed filename to generate the tags file'''
    # Produces a `tags` file in the current directory for parse_ctags().
    ctags_command = ['ctags', filename]
    return subprocess.call(ctags_command)
def parse_pattern(pattern):
    '''Parses a ctags pattern string into (function name, explained type).'''
    # Strip the ctags "/^...$/" delimiters and terminate the declaration
    # so pycparser can parse it as a standalone statement.
    pattern = pattern[2:-2] + ";"
    parser = c_parser.CParser()
    try:
        node = parser.parse(pattern, filename='<stdin>')
    except c_parser.ParseError:
        # print() replaces the original Python 2 print statement, which
        # is a SyntaxError under Python 3.
        print("Unable to parse pattern: " + pattern)
        sys.exit(-1)
    return (node.ext[-1].name, _explain_type(node.ext[-1]))
def _explain_type(decl):
    '''Recursively explains a type decl node'''
    node_kind = type(decl)
    if node_kind == c_ast.TypeDecl:
        prefix = ' '.join(decl.quals) + ' ' if decl.quals else ''
        return prefix + _explain_type(decl.type)
    if node_kind == c_ast.Typename or node_kind == c_ast.Decl:
        return _explain_type(decl.type)
    if node_kind == c_ast.IdentifierType:
        return ' '.join(decl.names)
    if node_kind == c_ast.PtrDecl:
        prefix = ' '.join(decl.quals) + ' ' if decl.quals else ''
        return prefix + _explain_type(decl.type) + "*"
    if node_kind == c_ast.ArrayDecl:
        description = 'array'
        if decl.dim:
            description += '[%s]' % decl.dim.value
        return description + " of " + _explain_type(decl.type)
    if node_kind == c_ast.FuncDecl:
        if decl.args:
            parameters = [_explain_type(param) for param in decl.args.params]
        else:
            parameters = []
        # Functions are explained as [return type, [parameter types]].
        return [_explain_type(decl.type), parameters]
    # Unknown node kinds fall through and yield None (original behaviour).
def parse_ctags():
    '''Returns a list of all the functions and their arguments found by ctags.'''
    try:
        tf = CTags('tags')
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; print() replaces the original
        # Python 2 print statement (a SyntaxError under Python 3).
        print("Unable to find tags file!")
        sys.exit(-1)
    entry = TagEntry()
    functions = []
    if 0 == tf.first(entry):
        # Tags file contains no entries at all.
        return []
    while True:
        functions.append(parse_pattern(entry['pattern']))
        if 0 == tf.next(entry):
            break
    return functions
def controller():
    '''Main handler that dispatches calls for KLEE-unit.'''
    if len(sys.argv) != 2:
        # print() replaces the original Python 2 print statement, which
        # is a SyntaxError under Python 3.
        print("KLEE-Unit: Usage: " + sys.argv[0] + " file_to_analyze.c")
        sys.exit(-1)
    filename = sys.argv[1]
    run_ctags(filename)
    funcs = parse_ctags()
    for f in funcs:
        if f[0] == 'main':
            # main() is renamed inside the generated harness, not tested.
            continue
        fn = generate_c(filename, f)
        compile_c(fn, fn + '.bc')
        run_klee(fn + '.bc')
    # Triage all klee-out-* directories once every function has run.
    collect_klee_runs()
# Run the KLEE-unit driver only when executed as a script (not on import).
if __name__ == "__main__":
    controller()
|
ranok/sledgehammer
|
klee-unit/klee-unit.py
|
Python
|
mit
| 4,806
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration: create the Orderingcompany model."""

    # Must be applied after migration 0005 of the quotation app.
    dependencies = [
        ('quotation', '0005_auto_20150828_2207'),
    ]

    operations = [
        # New table with an auto primary key and a single name column.
        migrations.CreateModel(
            name='Orderingcompany',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=1024)),
            ],
        ),
    ]
|
lowitty/eeep
|
quotation/migrations/0006_orderingcompany.py
|
Python
|
mit
| 542
|
"""
Python Interchangeable Virtual Instrument Library
Copyright (c) 2012-2016 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from .tektronixOA5000 import *
class tektronixOA5012(tektronixOA5000):
    "Tektronix OA5012 optical attenuator driver"

    def __init__(self, *args, **kwargs):
        # Record the instrument ID before delegating to the base driver;
        # setdefault lets a subclass that already set the ID win.
        self.__dict__.setdefault('_instrument_id', 'OA5012')
        super(tektronixOA5012, self).__init__(*args, **kwargs)
|
Diti24/python-ivi
|
ivi/tektronix/tektronixOA5012.py
|
Python
|
mit
| 1,438
|
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
    # Deferred import avoids a circular dependency between generated
    # model modules; publishing the class via globals() lets the
    # model_utils type-resolution machinery find it by name.
    from plaid.model.distribution_breakdown import DistributionBreakdown
    globals()['DistributionBreakdown'] = DistributionBreakdown
class PayPeriodDetails(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    # Enum values accepted for pay_frequency (the 'NULL' entry maps the
    # literal JSON string "null").
    allowed_values = {
        ('pay_frequency',): {
            'None': None,
            'PAY_FREQUENCY_UNKNOWN': "PAY_FREQUENCY_UNKNOWN",
            'PAY_FREQUENCY_WEEKLY': "PAY_FREQUENCY_WEEKLY",
            'PAY_FREQUENCY_BIWEEKLY': "PAY_FREQUENCY_BIWEEKLY",
            'PAY_FREQUENCY_SEMIMONTHLY': "PAY_FREQUENCY_SEMIMONTHLY",
            'PAY_FREQUENCY_MONTHLY': "PAY_FREQUENCY_MONTHLY",
            'NULL': "null",
        },
    }

    # No length/range/regex constraints for this model.
    validations = {
    }

    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'check_amount': (float, none_type,),  # noqa: E501
            'distribution_breakdown': ([DistributionBreakdown],),  # noqa: E501
            'end_date': (date, none_type,),  # noqa: E501
            'gross_earnings': (float, none_type,),  # noqa: E501
            'pay_date': (date, none_type,),  # noqa: E501
            'pay_frequency': (str, none_type,),  # noqa: E501
            'pay_day': (date, none_type,),  # noqa: E501
            'start_date': (date, none_type,),  # noqa: E501
        }

    @cached_property
    def discriminator():
        # This model has no discriminator field.
        return None

    # Python attribute name -> JSON key (identical here, kept explicit by
    # the generator).
    attribute_map = {
        'check_amount': 'check_amount',  # noqa: E501
        'distribution_breakdown': 'distribution_breakdown',  # noqa: E501
        'end_date': 'end_date',  # noqa: E501
        'gross_earnings': 'gross_earnings',  # noqa: E501
        'pay_date': 'pay_date',  # noqa: E501
        'pay_frequency': 'pay_frequency',  # noqa: E501
        'pay_day': 'pay_day',  # noqa: E501
        'start_date': 'start_date',  # noqa: E501
    }

    _composed_schemas = {}

    # Internal bookkeeping attributes that must never be treated as
    # model properties.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """PayPeriodDetails - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            check_amount (float, none_type): The amount of the paycheck.. [optional]  # noqa: E501
            distribution_breakdown ([DistributionBreakdown]): [optional]  # noqa: E501
            end_date (date, none_type): The pay period end date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional]  # noqa: E501
            gross_earnings (float, none_type): Total earnings before tax/deductions.. [optional]  # noqa: E501
            pay_date (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional]  # noqa: E501
            pay_frequency (str, none_type): The frequency at which an individual is paid.. [optional]  # noqa: E501
            pay_day (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional]  # noqa: E501
            start_date (date, none_type): The pay period start date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional]  # noqa: E501
        """

        # Pop the framework-control keywords before treating the rest of
        # kwargs as model properties.
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Generated models accept keyword arguments only.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
|
plaid/plaid-python
|
plaid/model/pay_period_details.py
|
Python
|
mit
| 9,216
|
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
@override_settings(AUTHENTICATION_BACKENDS=
                   ('django.contrib.auth.backends.ModelBackend',))
class StudentLoginLogoutTest(TestCase):
    """Exercise the student sign-in and sign-out views."""

    def setUp(self):
        # A known user account the sign-in view can authenticate against.
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.student = user
        self.client = Client()

    def test_login_with_correct_info(self):
        credentials = {'username': 'test', 'password': 'test'}
        response = self.client.post(reverse('student_signin'), credentials)
        self.assertRedirects(response, reverse('student_dashboard'))

    def test_login_with_incorrect_info(self):
        credentials = {'username': 'wrong', 'password': '1'}
        response = self.client.post(reverse('student_signin'), credentials)
        self.assertRedirects(response, reverse('student_index'))

    def test_login_and_logout(self):
        self.client.login(username='test', password='test')
        response = self.client.get(reverse('student_signout'))
        self.assertRedirects(response, reverse('student_index'))
@override_settings(AUTHENTICATION_BACKENDS=
                   ('django.contrib.auth.backends.ModelBackend',))
class StudentProfileTest(TestCase):
    """Exercise viewing and updating the student profile."""

    def setUp(self):
        # An authenticated session is required by the profile views.
        user = User(username='test')
        user.set_password('test')
        user.save()
        self.student = user
        self.client = Client()
        self.client.login(username='test', password='test')

    def _post_profile(self, payload):
        """POST the given profile fields to the update view."""
        return self.client.post(reverse('update_student_profile'), payload)

    def test_profile_exist(self):
        profile = self.student.profile
        self.assertTrue(profile)
        # A fresh user starts with every profile field blank.
        self.assertEqual(profile.school_id, '')
        self.assertEqual(profile.grade, '')
        self.assertEqual(profile.class_num, '')
        self.assertEqual(profile.phone_num, '')
        self.assertEqual(profile.major, '')

    def test_modified_profile(self):
        payload = {'school_id': 'school_id',
                   'grade': 'grade',
                   'major': 'major',
                   'class_num': 'class_num',
                   'phone_num': ''}
        response = self._post_profile(payload)
        self.assertEqual(response.content, '{"status_phrase": "ok"}')
        profile = self.student.profile
        self.assertEqual(profile.school_id, 'school_id')
        self.assertEqual(profile.grade, 'grade')
        self.assertEqual(profile.major, 'major')
        self.assertEqual(profile.class_num, 'class_num')
        self.assertEqual(profile.phone_num, '')

    def test_modified_profile_illegally(self):
        # Missing class_num must be rejected and leave the profile blank.
        payload = {'school_id': 'school_id',
                   'grade': 'grade',
                   'major': 'major',
                   'class_num': '',
                   'phone_num': ''}
        response = self._post_profile(payload)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, '{"status_phrase": "fail"}')
        profile = self.student.profile
        self.assertEqual(profile.school_id, '')
        self.assertEqual(profile.grade, '')
        self.assertEqual(profile.major, '')
        self.assertEqual(profile.class_num, '')
        self.assertEqual(profile.phone_num, '')
|
qdulab/OEMS
|
student/tests.py
|
Python
|
mit
| 3,632
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
Contains classes for working with the reaction model generated by RMG.
"""
import logging
import math
import numpy
import os.path
import itertools
from rmgpy.display import display
#import rmgpy.chemkin
import rmgpy.constants as constants
from rmgpy.quantity import Quantity
import rmgpy.species
from rmgpy.thermo import Wilhoit, NASA, ThermoData
from rmgpy.pdep import SingleExponentialDown
from rmgpy.statmech import Conformer
from rmgpy.data.base import Entry, ForbiddenStructureException
from rmgpy.data.thermo import *
from rmgpy.data.solvation import *
from rmgpy.data.kinetics import *
from rmgpy.data.statmech import *
from rmgpy.transport import TransportData
import rmgpy.data.rmg
#needed to call the generate3dTS method in Reaction class
from rmgpy.reaction import Reaction
from pdep import PDepReaction, PDepNetwork, PressureDependenceError
# generateThermoDataFromQM under the Species class imports the qm package
################################################################################
class Species(rmgpy.species.Species):
    """
    RMG-specific extension of :class:`rmgpy.species.Species` that adds
    solvation-aware thermo processing and records the size of the model
    core at the time the species was created.
    """

    # Class-level (shared across all species) solvation state; set once
    # per RMG job when a solvent is in use.
    solventName = None
    solventData = None
    solventViscosity = None
    diffusionTemp = None

    def __init__(self, index=-1, label='', thermo=None, conformer=None,
                 molecule=None, transportData=None, molecularWeight=None,
                 dipoleMoment=None, polarizability=None, Zrot=None,
                 energyTransferModel=None, reactive=True, coreSizeAtCreation=0):
        rmgpy.species.Species.__init__(self, index, label, thermo, conformer, molecule, transportData, molecularWeight, dipoleMoment, polarizability, Zrot, energyTransferModel, reactive)
        # Number of core species present when this species was created.
        self.coreSizeAtCreation = coreSizeAtCreation

    def __reduce__(self):
        """
        A helper function used when pickling an object.
        """
        return (Species, (self.index, self.label, self.thermo, self.conformer, self.molecule, self.transportData, self.molecularWeight, self.dipoleMoment, self.polarizability, self.Zrot, self.energyTransferModel, self.reactive, self.coreSizeAtCreation),)

    def generateThermoData(self, database, thermoClass=NASA, quantumMechanics=None):
        """
        Generates thermo data, first checking Libraries, then using either QM or Database.

        If quantumMechanics is not None, it is asked to calculate the thermo.
        Failing that, the database is used.

        The database generates the thermo data for each structure (resonance isomer),
        picks that with lowest H298 value.

        It then calls :meth:`processThermoData`, to convert (via Wilhoit) to NASA
        and set the E0.

        Result stored in `self.thermo` and returned.
        """
        from rmgpy.data.thermo import saveEntry

        thermo0 = None

        # 1. Thermo libraries take precedence over everything else.
        thermo0 = database.thermo.getThermoDataFromLibraries(self)

        if thermo0 is not None:
            logging.info("Found thermo for {0} in thermo library".format(self.label))
            assert len(thermo0) == 3, "thermo0 should be a tuple at this point: (thermoData, library, entry)"
            thermo0 = thermo0[0]
        elif quantumMechanics:
            # 2. Quantum-mechanical estimation, if a QM engine was supplied.
            molecule = self.molecule[0]
            if quantumMechanics.settings.onlyCyclics and not molecule.isCyclic():
                pass
            else: # try a QM calculation
                if molecule.getRadicalCount() > quantumMechanics.settings.maxRadicalNumber:
                    # Too many radicals for direct calculation: use HBI.
                    logging.info("{0} radicals on {1} exceeds limit of {2}. Using HBI method.".format(
                        self.molecule[0].getRadicalCount(),
                        self.label,
                        quantumMechanics.settings.maxRadicalNumber,
                        ))
                    # Need to estimate thermo via each resonance isomer
                    thermo = []
                    for molecule in self.molecule:
                        molecule.clearLabeledAtoms()
                        molecule.updateAtomTypes()
                        tdata = database.thermo.estimateRadicalThermoViaHBI(molecule, quantumMechanics.getThermoData)
                        if tdata is not None:
                            thermo.append(tdata)
                    if thermo:
                        # Sort resonance isomers by H298 and keep the most
                        # stable one's thermo as the answer.
                        H298 = numpy.array([t.getEnthalpy(298.) for t in thermo])
                        indices = H298.argsort()
                        for i, ind in enumerate(indices):
                            logging.info("Resonance isomer {0} {1} gives H298={2:.0f} J/mol".format(i, self.molecule[ind].toSMILES(), H298[ind]))
                        self.molecule = [self.molecule[ind] for ind in indices]
                        molecule = self.molecule[0]
                        thermo0 = thermo[indices[0]]
                    else:
                        pass
                    # Audit trail for the HBI estimates (append-only log
                    # file in the working directory).
                    with open('thermoHBIcheck.txt','a') as f:
                        f.write('// {0!r}\n'.format(thermo0).replace('),','),\n// '))
                        f.write('{0}\n'.format(molecule.toSMILES()))
                        f.write('{0}\n\n'.format(molecule.toAdjacencyList(removeH=False)))
                else: # Not too many radicals: do a direct calculation.
                    thermo0 = quantumMechanics.getThermoData(molecule) # returns None if it fails
                    if thermo0 is not None:
                        # Write the QM molecule thermo to a library so that can be used in future RMG jobs.
                        quantumMechanics.database.loadEntry(index = len(quantumMechanics.database.entries) + 1,
                                                            label = molecule.toSMILES(),
                                                            molecule = molecule.toAdjacencyList(),
                                                            thermo = thermo0,
                                                            shortDesc = thermo0.comment
                                                            )
        if thermo0 is None:
            # 3. Fall back to group-additivity estimation from the database.
            thermo0 = database.thermo.getThermoData(self)

        return self.processThermoData(database, thermo0, thermoClass)

    def processThermoData(self, database, thermo0, thermoClass=NASA):
        """
        Converts via Wilhoit into required `thermoClass` and sets `E0`.

        Resulting thermo is stored (`self.thermo`) and returned.
        """
        # Always convert to Wilhoit so we can compute E0
        if isinstance(thermo0, Wilhoit):
            wilhoit = thermo0
        elif isinstance(thermo0, ThermoData):
            Tdata = thermo0._Tdata.value_si
            Cpdata = thermo0._Cpdata.value_si
            H298 = thermo0._H298.value_si
            S298 = thermo0._S298.value_si
            Cp0 = thermo0._Cp0.value_si
            CpInf = thermo0._CpInf.value_si
            wilhoit = Wilhoit().fitToDataForConstantB(Tdata, Cpdata, Cp0, CpInf, H298, S298, B=1000.0)
        else:
            Cp0 = self.calculateCp0()
            CpInf = self.calculateCpInf()
            wilhoit = thermo0.toWilhoit(Cp0=Cp0, CpInf=CpInf)
        wilhoit.comment = thermo0.comment

        # Add on solvation correction
        if Species.solventData:
            #logging.info("Making solvent correction for {0}".format(Species.solventName))
            soluteData = database.solvation.getSoluteData(self)
            solvation_correction = database.solvation.getSolvationCorrection(soluteData, Species.solventData)
            # correction is added to the entropy and enthalpy
            wilhoit.S0.value_si = (wilhoit.S0.value_si + solvation_correction.entropy)
            wilhoit.H0.value_si = (wilhoit.H0.value_si + solvation_correction.enthalpy)

        # Compute E0 by extrapolation to 0 K
        if self.conformer is None:
            self.conformer = Conformer()
        self.conformer.E0 = (wilhoit.getEnthalpy(1.0)*1e-3,"kJ/mol")

        # Convert to desired thermo class
        if isinstance(thermo0, thermoClass):
            self.thermo = thermo0
            # If we don't have an E0, copy it across from the Wilhoit that was fitted
            if self.thermo.E0 is None:
                self.thermo.E0 = wilhoit.E0
        elif isinstance(wilhoit, thermoClass):
            self.thermo = wilhoit
        else:
            self.thermo = wilhoit.toNASA(Tmin=100.0, Tmax=5000.0, Tint=1000.0)
            if self.thermo.__class__ != thermo0.__class__:
                # Compute RMS error of overall transformation
                Tlist = numpy.array([300.0, 400.0, 500.0, 600.0, 800.0, 1000.0, 1500.0], numpy.float64)
                err = 0.0
                for T in Tlist:
                    err += (self.thermo.getHeatCapacity(T) - thermo0.getHeatCapacity(T))**2
                err = math.sqrt(err/len(Tlist))/constants.R
                logging.log(logging.WARNING if err > 0.2 else 0, 'Average RMS error in heat capacity fit to {0} = {1:g}*R'.format(self, err))
        return self.thermo

    def generateStatMech(self, database):
        """
        Generate molecular degree of freedom data for the species. You must
        have already provided a thermodynamics model using e.g.
        :meth:`generateThermoData()`.
        """
        if not self.hasThermo():
            raise Exception("Unable to determine statmech model for species {0}: No thermodynamics model found.".format(self))
        molecule = self.molecule[0]
        conformer = database.statmech.getStatmechData(molecule, self.thermo)
        if self.conformer is None:
            self.conformer = Conformer()
        # Keep the previously computed E0; only modes and multiplicity
        # come from the statmech database.
        self.conformer.E0 = self.thermo.E0
        self.conformer.modes = conformer.modes
        self.conformer.spinMultiplicity = conformer.spinMultiplicity

    def generateTransportData(self, database):
        """
        Generate the transportData parameters for the species.
        """
        #count = sum([1 for atom in self.molecule[0].vertices if atom.isNonHydrogen()])
        self.transportData = database.transport.getTransportProperties(self)[0]
        #previous method for calculating transport properties
        '''
        if count == 1:
            self.transportData.sigma = (3.758e-10,"m")
            self.transportData.epsilon = (148.6,"K")
        elif count == 2:
            self.transportData.sigma = (4.443e-10,"m")
            self.transportData.epsilon = (110.7,"K")
        elif count == 3:
            self.transportData.sigma = (5.118e-10,"m")
            self.transportData.epsilon = (237.1,"K")
        elif count == 4:
            self.transportData.sigma = (4.687e-10,"m")
            self.transportData.epsilon = (531.4,"K")
        elif count == 5:
            self.transportData.sigma = (5.784e-10,"m")
            self.transportData.epsilon = (341.1,"K")
        else:
            self.transportData.sigma = (5.949e-10,"m")
            self.transportData.epsilon = (399.3,"K")
        '''

    def generateEnergyTransferModel(self):
        """
        Generate the collisional energy transfer model parameters for the
        species. This "algorithm" is *very* much in need of improvement.
        """
        # Fixed single-exponential-down model with alpha0 evaluated at 300 K.
        self.energyTransferModel = SingleExponentialDown(
            alpha0 = (300*0.011962,"kJ/mol"),
            T0 = (300,"K"),
            n = 0.85,
        )
################################################################################
class ReactionModel:
    """
    Represent a generic reaction model. A reaction model consists of `species`,
    a list of species, and `reactions`, a list of reactions.
    """

    def __init__(self, species=None, reactions=None):
        # Fall back to fresh, unshared lists when no collections are given.
        self.species = [] if not species else species
        self.reactions = [] if not reactions else reactions
################################################################################
class CoreEdgeReactionModel:
"""
Represent a reaction model constructed using a rate-based screening
algorithm. The species and reactions in the model itself are called the
*core*; the species and reactions identified as candidates for inclusion in
the model are called the *edge*. The attributes are:
========================= ==============================================================
Attribute Description
========================= ==============================================================
`core` The species and reactions of the current model core
`edge` The species and reactions of the current model edge
`networkDict` A dictionary of pressure-dependent reaction networks (:class:`Network` objects) indexed by source.
`networkList` A list of pressure-dependent reaction networks (:class:`Network` objects)
`networkCount` A counter for the number of pressure-dependent networks created
========================= ==============================================================
"""
def __init__(self, core=None, edge=None):
if core is None:
self.core = ReactionModel()
else:
self.core = core
if edge is None:
self.edge = ReactionModel()
else:
self.edge = edge
# The default tolerances mimic the original RMG behavior; no edge
# pruning takes place, and the simulation is interrupted as soon as
# a species flux higher than the validity
self.networkDict = {}
self.networkList = []
self.networkCount = 0
self.speciesDict = {}
self.reactionDict = {}
self.speciesCache = [None for i in range(4)]
self.speciesCounter = 0
self.reactionCounter = 0
self.newSpeciesList = []
self.newReactionList = []
self.outputSpeciesList = []
self.outputReactionList = []
self.pressureDependence = None
self.quantumMechanics = None
self.verboseComments = False
self.kineticsEstimator = 'group additivity'
self.speciesConstraints = {}
def checkForExistingSpecies(self, molecule):
"""
Check to see if an existing species contains the same
:class:`structure.Structure` as `structure`. Returns ``True`` or
``False`` and the matched species (if found, or ``None`` if not).
"""
# First check cache and return if species is found
for i, spec in enumerate(self.speciesCache):
if spec is not None:
for mol in spec.molecule:
if molecule.isIsomorphic(mol):
self.speciesCache.pop(i)
self.speciesCache.insert(0, spec)
return True, spec
# Return an existing species if a match is found
formula = molecule.getFormula()
try:
speciesList = self.speciesDict[formula]
except KeyError:
return False, None
for spec in speciesList:
if spec.isIsomorphic(molecule):
self.speciesCache.pop()
self.speciesCache.insert(0, spec)
return True, spec
# At this point we can conclude that the structure does not exist
return False, None
    def makeNewSpecies(self, object, label='', reactive=True, checkForExisting=True):
        """
        Formally create a new species from the specified `object`, which can be
        either a :class:`Molecule` object or an :class:`rmgpy.species.Species`
        object.

        Returns a tuple ``(species, isNew)``: the :class:`Species` object (an
        existing one if a match was found and `checkForExisting` is ``True``)
        and a boolean that is ``True`` only when a brand-new species was made.
        Side effects on a new species: it is registered in ``self.speciesDict``
        (keyed by formula), appended to ``self.newSpeciesList``, and
        ``self.speciesCounter`` is incremented.
        """
        # If given a Species, unpack its first molecule and inherit its label
        # (unless an explicit label was passed) and reactivity flag.
        if isinstance(object, rmgpy.species.Species):
            molecule = object.molecule[0]
            label = label if label != '' else object.label
            reactive = object.reactive
        else:
            molecule = object

        # Atom labels are an artifact of reaction generation; clear them so
        # isomorphism checks and SMILES output are not affected.
        molecule.clearLabeledAtoms()

        # If desired, check to ensure that the species is new; return the
        # existing species if not new
        if checkForExisting:
            found, spec = self.checkForExistingSpecies(molecule)
            if found: return spec, False

        # Check that the structure is not forbidden

        # If we're here then we're ready to make the new species
        if label == '':
            # Use SMILES as default format for label
            # However, SMILES can contain slashes (to describe the
            # stereochemistry around double bonds); since RMG doesn't
            # distinguish cis and trans isomers, we'll just strip these out
            # so that we can use the label in file paths
            label = molecule.toSMILES().replace('/','').replace('\\','')
        logging.debug('Creating new species {0}'.format(label))

        spec = Species(index=self.speciesCounter+1, label=label, molecule=[molecule], reactive=reactive)
        # Remember the core size at creation time; prune() uses this to keep
        # recently created edge species from being pruned too early.
        spec.coreSizeAtCreation = len(self.core.species)
        spec.generateResonanceIsomers()
        # NOTE(review): getMolecularWeight() appears to return kg/mol, with the
        # factor of 1000 converting to g/mol (numerically amu) — confirm.
        spec.molecularWeight = Quantity(spec.molecule[0].getMolecularWeight()*1000.,"amu")
        # spec.generateTransportData(database)
        spec.generateEnergyTransferModel()
        # Register the new species under its formula for fast later lookup.
        formula = molecule.getFormula()
        if formula in self.speciesDict:
            self.speciesDict[formula].append(spec)
        else:
            self.speciesDict[formula] = [spec]
        self.speciesCounter += 1

        # Since the species is new, add it to the list of new species
        self.newSpeciesList.append(spec)

        return spec, True
    def checkForExistingReaction(self, rxn):
        """
        Check to see if an existing reaction has the same reactants, products, and
        family as `rxn`. Returns :data:`True` or :data:`False` and the matched
        reaction (if found, else ``None``).

        Note: this sorts ``rxn.reactants`` and ``rxn.products`` in place as a
        side effect, so that list comparisons below are order-independent.
        """
        # Make sure the reactant and product lists are sorted before performing the check
        rxn.reactants.sort()
        rxn.products.sort()

        # Get the short-list of reactions with the same family, reactant1 and reactant2
        r1 = rxn.reactants[0]
        if len(rxn.reactants)==1: r2 = None
        else: r2 = rxn.reactants[1]
        family = rxn.family

        try:
            # Copy ([:]) so that extending below does not mutate the stored short-list.
            my_reactionList = self.reactionDict[family][r1][r2][:]
        except KeyError: # no such short-list: must be new, unless in seed.
            my_reactionList = []

        # if the family is its own reverse (H-Abstraction) then check the other direction
        if isinstance(family,KineticsFamily) and family.ownReverse: # (family may be a KineticsLibrary)
            # Get the short-list of reactions with the same family, product1 and product2
            r1 = rxn.products[0]
            if len(rxn.products)==1: r2 = None
            else: r2 = rxn.products[1]
            family = rxn.family

            try:
                my_reactionList.extend(self.reactionDict[family][r1][r2])
            except KeyError: # no such short-list: must be new, unless in seed.
                pass

        # Now use short-list to check for matches. All should be in same forward direction.
        for rxn0 in my_reactionList:
            if (rxn0.reactants == rxn.reactants and rxn0.products == rxn.products):
                return True, rxn0
            # For own-reverse families a reversed match is the same reaction.
            if isinstance(family,KineticsFamily) and family.ownReverse:
                if (rxn0.reactants == rxn.products and rxn0.products == rxn.reactants):
                    return True, rxn0

        # Now check seed mechanisms
        # We want to check for duplicates in *other* seed mechanisms, but allow
        # duplicated *within* the same seed mechanism
        for family0 in self.reactionDict:
            if isinstance(family0, KineticsLibrary) and family0 != family:

                # First check seed short-list in forward direction
                r1 = rxn.reactants[0]
                if len(rxn.reactants)==1: r2 = None
                else: r2 = rxn.reactants[1]
                try:
                    my_reactionList = self.reactionDict[family0][r1][r2]
                except KeyError:
                    my_reactionList = []
                for rxn0 in my_reactionList:
                    if (rxn0.reactants == rxn.reactants and rxn0.products == rxn.products) or \
                        (rxn0.reactants == rxn.products and rxn0.products == rxn.reactants):
                        return True, rxn0

                # Now get the seed short-list of the reverse reaction
                r1 = rxn.products[0]
                if len(rxn.products)==1: r2 = None
                else: r2 = rxn.products[1]
                try:
                    my_reactionList = self.reactionDict[family0][r1][r2]
                except KeyError:
                    my_reactionList = []
                for rxn0 in my_reactionList:
                    if (rxn0.reactants == rxn.reactants and rxn0.products == rxn.products) or \
                        (rxn0.reactants == rxn.products and rxn0.products == rxn.reactants):
                        return True, rxn0

        # No match found in model or other seed mechanisms.
        return False, None
def makeNewReaction(self, forward, checkExisting=True):
"""
Make a new reaction given a :class:`Reaction` object `forward`. The reaction is added to the global list
of reactions. Returns the reaction in the direction that corresponds to the
estimated kinetics, along with whether or not the reaction is new to the
global reaction list.
The forward direction is determined using the "is_reverse" attribute of the
reaction's family. If the reaction family is its own reverse, then it is
made such that the forward reaction is exothermic at 298K.
The forward reaction is appended to self.newReactionList if it is new.
"""
# Determine the proper species objects for all reactants and products
reactants = [self.makeNewSpecies(reactant)[0] for reactant in forward.reactants]
products = [self.makeNewSpecies(product)[0] for product in forward.products ]
if forward.pairs is not None:
for pairIndex in range(len(forward.pairs)):
reactantIndex = forward.reactants.index(forward.pairs[pairIndex][0])
productIndex = forward.products.index(forward.pairs[pairIndex][1])
forward.pairs[pairIndex] = (reactants[reactantIndex], products[productIndex])
if hasattr(forward, 'reverse'):
forward.reverse.pairs[pairIndex] = (products[productIndex], reactants[reactantIndex])
forward.reactants = reactants
forward.products = products
if checkExisting:
found, rxn = self.checkForExistingReaction(forward)
if found: return rxn, False
# Generate the reaction pairs if not yet defined
if forward.pairs is None:
forward.generatePairs()
if hasattr(forward, 'reverse'):
forward.reverse.generatePairs()
# Note in the log
if isinstance(forward, TemplateReaction):
logging.debug('Creating new {0} template reaction {1}'.format(forward.family.label, forward))
elif isinstance(forward, DepositoryReaction):
logging.debug('Creating new {0} reaction {1}'.format(forward.getSource().label, forward))
elif isinstance(forward, LibraryReaction):
logging.debug('Creating new library reaction {0}'.format(forward))
else:
raise Exception("Unrecognized reaction type {0!s}".format(forward.__class__))
# Add to the global dict/list of existing reactions (a list broken down by family, r1, r2)
# identify r1 and r2
r1 = forward.reactants[0]
r2 = None if len(forward.reactants) == 1 else forward.reactants[1]
family = forward.family
# make dictionary entries if necessary
if family not in self.reactionDict:
self.reactionDict[family] = {}
if not self.reactionDict[family].has_key(r1):
self.reactionDict[family][r1] = dict()
if not self.reactionDict[family][r1].has_key(r2):
self.reactionDict[family][r1][r2] = list()
# store this reaction at the top of the relevant short-list
self.reactionDict[family][r1][r2].insert(0, forward)
forward.index = self.reactionCounter + 1
self.reactionCounter += 1
# Since the reaction is new, add it to the list of new reactions
self.newReactionList.append(forward)
# Return newly created reaction
return forward, True
def makeNewPDepReaction(self, forward):
"""
Make a new pressure-dependent reaction based on a list of `reactants` and a
list of `products`. The reaction belongs to the specified `network` and
has pressure-dependent kinetics given by `kinetics`.
No checking for existing reactions is made here. The returned PDepReaction
object is not added to the global list of reactions, as that is intended
to represent only the high-pressure-limit set. The reactionCounter is
incremented, however, since the returned reaction can and will exist in
the model edge and/or core.
"""
# Don't create reverse reaction: all such reactions are treated as irreversible
# The reverse direction will come from a different partial network
# Note that this isn't guaranteed to satisfy thermodynamics (but will probably be close)
forward.reverse = None
forward.reversible = False
# Generate the reaction pairs if not yet defined
if forward.pairs is None:
forward.generatePairs()
# Set reaction index and increment the counter
forward.index = self.reactionCounter + 1
self.reactionCounter += 1
return forward
def react(self, database, speciesA, speciesB=None):
"""
Generates reactions involving :class:`rmgpy.species.Species` speciesA and speciesB.
"""
reactionList = []
if speciesB is None:
for moleculeA in speciesA.molecule:
reactionList.extend(database.kinetics.generateReactions([moleculeA], failsSpeciesConstraints=self.failsSpeciesConstraints))
moleculeA.clearLabeledAtoms()
else:
for moleculeA in speciesA.molecule:
for moleculeB in speciesB.molecule:
reactionList.extend(database.kinetics.generateReactions([moleculeA, moleculeB], failsSpeciesConstraints=self.failsSpeciesConstraints))
moleculeA.clearLabeledAtoms()
moleculeB.clearLabeledAtoms()
return reactionList
    def enlarge(self, newObject):
        """
        Enlarge a reaction model by processing the objects in the list `newObject`.
        If `newObject` is a
        :class:`rmg.species.Species` object, then the species is moved from
        the edge to the core and reactions generated for that species, reacting
        with itself and with all other species in the model core. If `newObject`
        is a :class:`rmg.unirxn.network.Network` object, then reactions are
        generated for the species in the network with the largest leak flux.

        After all objects are processed, thermodynamics and kinetics are
        generated for the newly created species and reactions, pressure-
        dependent networks are updated (when enabled), new core reactions are
        checked for Chemkin duplicates, and a summary is logged.
        """
        database = rmgpy.data.rmg.database

        # Accept either a single object or a list of objects.
        if not isinstance(newObject, list):
            newObject = [newObject]

        # Remember the old core/edge sizes so that the newly added items can
        # be sliced out afterwards for duplicate checks and the summary.
        numOldCoreSpecies = len(self.core.species)
        numOldCoreReactions = len(self.core.reactions)
        numOldEdgeSpecies = len(self.edge.species)
        numOldEdgeReactions = len(self.edge.reactions)
        reactionsMovedFromEdge = []
        newReactionList = []; newSpeciesList = []

        for obj in newObject:

            # These instance lists are (re)filled by makeNewSpecies /
            # makeNewReaction during processing of this object.
            self.newReactionList = []; self.newSpeciesList = []
            newReactions = []
            pdepNetwork = None
            objectWasInEdge = False

            if isinstance(obj, Species):

                newSpecies = obj
                objectWasInEdge = newSpecies in self.edge.species

                if not newSpecies.reactive:
                    logging.info('NOT generating reactions for unreactive species {0}'.format(newSpecies))
                else:
                    logging.info('Adding species {0} to model core'.format(newSpecies))
                    display(newSpecies) # if running in IPython --pylab mode, draws the picture!

                    # Find reactions involving the new species as unimolecular reactant
                    # or product (e.g. A <---> products)
                    newReactions.extend(self.react(database, newSpecies))
                    # Find reactions involving the new species as bimolecular reactants
                    # or products with other core species (e.g. A + B <---> products)
                    for coreSpecies in self.core.species:
                        if coreSpecies.reactive:
                            newReactions.extend(self.react(database, newSpecies, coreSpecies))
                    # Find reactions involving the new species as bimolecular reactants
                    # or products with itself (e.g. A + A <---> products)
                    newReactions.extend(self.react(database, newSpecies, newSpecies))

                # Add new species
                reactionsMovedFromEdge = self.addSpeciesToCore(newSpecies)

                # Process the new reactions
                # While adding to core/edge/pdep network, this clears atom labels:
                self.processNewReactions(newReactions, newSpecies, pdepNetwork)

            elif isinstance(obj, tuple) and isinstance(obj[0], PDepNetwork) and self.pressureDependence:

                pdepNetwork, newSpecies = obj
                newReactions.extend(pdepNetwork.exploreIsomer(newSpecies, self, database))
                self.processNewReactions(newReactions, newSpecies, pdepNetwork)

            else:
                raise TypeError('Unable to use object {0} to enlarge reaction model; expecting an object of class rmg.model.Species or rmg.model.PDepNetwork, not {1}'.format(obj, obj.__class__))

            # If there are any core species among the unimolecular product channels
            # of any existing network, they need to be made included
            for network in self.networkList:
                network.updateConfigurations(self)
                index = 0
                # Restart the scan (index = 0) whenever a network exploration
                # changes the configurations; terminate once every core
                # species has been checked without triggering an exploration.
                while index < len(self.core.species):
                    species = self.core.species[index]
                    isomers = [isomer.species[0] for isomer in network.isomers]
                    if species in isomers and species not in network.explored:
                        network.explored.append(species)
                        continue
                    for products in network.products:
                        products = products.species
                        if len(products) == 1 and products[0] == species:
                            newReactions = network.exploreIsomer(species, self, database)
                            self.processNewReactions(newReactions, species, network)
                            network.updateConfigurations(self)
                            index = 0
                            break
                    else:
                        index += 1

            if isinstance(obj, Species) and objectWasInEdge:
                # moved one species from edge to core
                numOldEdgeSpecies -= 1
                # moved these reactions from edge to core
                numOldEdgeReactions -= len(reactionsMovedFromEdge)

            newSpeciesList.extend(self.newSpeciesList)
            newReactionList.extend(self.newReactionList)

        # Generate thermodynamics of new species
        logging.info('Generating thermodynamics for new species...')
        for spec in newSpeciesList:
            spec.generateThermoData(database, quantumMechanics=self.quantumMechanics)
            spec.generateTransportData(database)

        # Generate kinetics of new reactions
        logging.info('Generating kinetics for new reactions...')
        for reaction in newReactionList:
            # If the reaction already has kinetics (e.g. from a library),
            # assume the kinetics are satisfactory
            if reaction.kinetics is None:
                # Set the reaction kinetics
                kinetics, source, entry, isForward = self.generateKinetics(reaction)
                reaction.kinetics = kinetics
                # Flip the reaction direction if the kinetics are defined in the reverse direction
                if not isForward:
                    reaction.reactants, reaction.products = reaction.products, reaction.reactants
                    reaction.pairs = [(p,r) for r,p in reaction.pairs]
                if reaction.family.ownReverse and hasattr(reaction,'reverse'):
                    if not isForward:
                        reaction.template = reaction.reverse.template
                    # We're done with the "reverse" attribute, so delete it to save a bit of memory
                    delattr(reaction,'reverse')

        # For new reactions, convert ArrheniusEP to Arrhenius, and fix barrier heights.
        # self.newReactionList only contains *actually* new reactions, all in the forward direction.
        for reaction in newReactionList:
            # convert KineticsData to Arrhenius forms
            if isinstance(reaction.kinetics, KineticsData):
                reaction.kinetics = reaction.kinetics.toArrhenius()
            # correct barrier heights of estimated kinetics
            if isinstance(reaction,TemplateReaction) or isinstance(reaction,DepositoryReaction): # i.e. not LibraryReaction
                reaction.fixBarrierHeight() # also converts ArrheniusEP to Arrhenius.
            if self.pressureDependence and reaction.isUnimolecular():
                # If this is going to be run through pressure dependence code,
                # we need to make sure the barrier is positive.
                reaction.fixBarrierHeight(forcePositive=True)

        # Update unimolecular (pressure dependent) reaction networks
        if self.pressureDependence:
            # Recalculate k(T,P) values for modified networks
            self.updateUnimolecularReactionNetworks(database)
            logging.info('')

        # Check new core reactions for Chemkin duplicates
        newCoreReactions = self.core.reactions[numOldCoreReactions:]
        checkedCoreReactions = self.core.reactions[:numOldCoreReactions]
        from rmgpy.chemkin import markDuplicateReaction
        for rxn in newCoreReactions:
            markDuplicateReaction(rxn,checkedCoreReactions)
            checkedCoreReactions.append(rxn)

        self.printEnlargeSummary(
            newCoreSpecies=self.core.species[numOldCoreSpecies:],
            newCoreReactions=self.core.reactions[numOldCoreReactions:],
            reactionsMovedFromEdge=reactionsMovedFromEdge,
            newEdgeSpecies=self.edge.species[numOldEdgeSpecies:],
            newEdgeReactions=self.edge.reactions[numOldEdgeReactions:]
        )

        logging.info('')
    def processNewReactions(self, newReactions, newSpecies, pdepNetwork=None):
        """
        Process a list of newly-generated reactions involving the new core
        species or explored isomer `newSpecies` in network `pdepNetwork`.

        Makes a reaction and decides where to put it: core, edge, or PDepNetwork.
        """
        from rmgpy.data.kinetics.library import LibraryReaction
        for rxn in newReactions:
            rxn, isNew = self.makeNewReaction(rxn)
            if isNew:
                # We've made a new reaction, so make sure the species involved
                # are in the core or edge
                allSpeciesInCore = True
                # Add the reactant and product species to the edge if necessary
                # At the same time, check if all reactants and products are in the core
                for spec in rxn.reactants:
                    if spec not in self.core.species:
                        allSpeciesInCore = False
                        if spec not in self.edge.species:
                            self.addSpeciesToEdge(spec)
                for spec in rxn.products:
                    if spec not in self.core.species:
                        allSpeciesInCore = False
                        if spec not in self.edge.species:
                            self.addSpeciesToEdge(spec)

            # Atom count summed over the reactants (first resonance isomer of
            # each); used to decide whether P-dep effects are negligible.
            isomerAtoms = sum([len(spec.molecule[0].atoms) for spec in rxn.reactants])

            # Decide whether or not to handle the reaction as a pressure-dependent reaction
            pdep = True
            if not self.pressureDependence:
                # The pressure dependence option is turned off entirely
                pdep = False
            elif self.pressureDependence.maximumAtoms is not None and self.pressureDependence.maximumAtoms < isomerAtoms:
                # The reaction involves so many atoms that pressure-dependent effects are assumed to be negligible
                pdep = False
            elif not (rxn.isIsomerization() or rxn.isDissociation() or rxn.isAssociation()):
                # The reaction is not unimolecular in either direction, so it cannot be pressure-dependent
                pdep = False
            elif rxn.kinetics is not None and rxn.kinetics.isPressureDependent():
                # The reaction already has pressure-dependent kinetics (e.g. from a reaction library)
                pdep = False

            # If pressure dependence is on, we only add reactions that are not unimolecular;
            # unimolecular reactions will be added after processing the associated networks
            if not pdep:
                if not isNew:
                    # The reaction is not new, so it should already be in the core or edge
                    # (note: allSpeciesInCore is only defined when isNew, so
                    # this guard must come first)
                    continue
                if allSpeciesInCore:
                    #for reaction in self.core.reactions:
                    #    if isinstance(reaction, Reaction) and reaction.isEquivalent(rxn): break
                    #else:
                    self.addReactionToCore(rxn)
                else:
                    #for reaction in self.edge.reactions:
                    #    if isinstance(reaction, Reaction) and reaction.isEquivalent(rxn): break
                    #else:
                    self.addReactionToEdge(rxn)
            else:
                # Add the reaction to the appropriate unimolecular reaction network
                # If pdepNetwork is not None then that will be the network the
                # (path) reactions are added to
                # Note that this must be done even with reactions that are not new
                # because of the way partial networks are explored
                # Since PDepReactions are created as irreversible, not doing so
                # would cause you to miss the reverse reactions!
                net = self.addReactionToUnimolecularNetworks(rxn, newSpecies=newSpecies, network=pdepNetwork)
                if isinstance(rxn, LibraryReaction):
                    # If reaction came from a reaction library, omit it from the core and edge so that it does
                    # not get double-counted with the pdep network
                    if rxn in self.core.reactions:
                        self.core.reactions.remove(rxn)
                    if rxn in self.edge.reactions:
                        self.edge.reactions.remove(rxn)
    def generateKinetics(self, reaction):
        """
        Generate best possible kinetics for the given `reaction` using the kinetics database.

        Returns a tuple ``(kinetics, source, entry, isForward)``. For families
        that are their own reverse, kinetics are estimated in both directions
        and the better-supported direction is selected; ``isForward`` reports
        whether the returned kinetics correspond to the reaction as written.
        """
        # Only reactions from families should be missing kinetics
        assert isinstance(reaction, TemplateReaction)

        # Get the kinetics for the reaction
        kinetics, source, entry, isForward = reaction.family.getKinetics(reaction, template=reaction.template, degeneracy=reaction.degeneracy, estimator=self.kineticsEstimator, returnAllKinetics=False)

        # Get the enthalpy and free energy of reaction at 298 K (used below to
        # pick the exergonic direction when both estimates are equally good)
        H298 = reaction.getEnthalpyOfReaction(298)
        G298 = reaction.getFreeEnergyOfReaction(298)

        if reaction.family.ownReverse and hasattr(reaction,'reverse'):
            # The kinetics family is its own reverse, so we could estimate kinetics in either direction

            # First get the kinetics for the other direction
            rev_kinetics, rev_source, rev_entry, rev_isForward = reaction.family.getKinetics(reaction.reverse, template=reaction.reverse.template, degeneracy=reaction.reverse.degeneracy, estimator=self.kineticsEstimator, returnAllKinetics=False)

            # Now decide which direction's kinetics to keep
            keepReverse = False
            if (source is not None and rev_source is None):
                # Only the forward has a source - use forward.
                reason = "This direction matched an entry in {0}, the other was just an estimate.".format(source.label)
            elif (source is None and rev_source is not None):
                # Only the reverse has a source - use reverse.
                keepReverse = True
                reason = "This direction matched an entry in {0}, the other was just an estimate.".format(rev_source.label)
            elif (source is not None and rev_source is not None
                  and entry is rev_entry):
                # Both forward and reverse have the same source and entry
                # Use the one for which the kinetics is the forward kinetics
                reason = "Both direction matched the same entry in {0}, which is defined in this direction.".format(source.label)
                keepReverse = not isForward
            elif self.kineticsEstimator == 'group additivity' and (kinetics.comment.find("Fitted to 1 rate")>0
                  and not rev_kinetics.comment.find("Fitted to 1 rate")>0) :
                # forward kinetics were fitted to only 1 rate, but reverse are hopefully better
                keepReverse = True
                reason = "Other direction matched a group only fitted to 1 rate."
            elif self.kineticsEstimator == 'group additivity' and (not kinetics.comment.find("Fitted to 1 rate")>0
                  and rev_kinetics.comment.find("Fitted to 1 rate")>0) :
                # reverse kinetics were fitted to only 1 rate, but forward are hopefully better
                keepReverse = False
                reason = "Other direction matched a group only fitted to 1 rate."
            elif entry is not None and rev_entry is not None:
                # Both directions matched explicit rate rules
                # Keep the direction with the lower (but nonzero) rank
                if entry.rank < rev_entry.rank and entry.rank != 0:
                    keepReverse = False
                    reason = "Both directions matched explicit rate rules, but this direction has a rule with a lower rank."
                elif rev_entry.rank < entry.rank and rev_entry.rank != 0:
                    keepReverse = True
                    reason = "Both directions matched explicit rate rules, but this direction has a rule with a lower rank."
                # Otherwise keep the direction that is exergonic at 298 K
                else:
                    keepReverse = G298 > 0 and isForward and rev_isForward
                    reason = "Both directions matched explicit rate rules, but this direction is exergonic."
            else:
                # Keep the direction that is exergonic at 298 K
                # This must be done after the thermo generation step
                keepReverse = G298 > 0 and isForward and rev_isForward
                reason = "Both directions are estimates, but this direction is exergonic."

            if keepReverse:
                kinetics = rev_kinetics
                source = rev_source
                entry = rev_entry
                isForward = not rev_isForward
                # Sign conventions flip with the direction.
                H298 = -H298
                G298 = -G298

            if self.verboseComments:
                kinetics.comment += "\nKinetics were estimated in this direction instead of the reverse because:\n{0}".format(reason)
                kinetics.comment += "\ndHrxn(298 K) = {0:.2f} kJ/mol, dGrxn(298 K) = {1:.2f} kJ/mol".format(H298 / 1000., G298 / 1000.)

        # The comments generated by the database for estimated kinetics can
        # be quite long, and therefore not very useful
        # We don't want to waste lots of memory storing these long,
        # uninformative strings, so here we replace them with much shorter ones
        if not self.verboseComments:
            # Only keep a short comment (to save memory)
            if 'Exact' in kinetics.comment or 'Matched rule' in kinetics.comment:
                # Exact match of rate rule
                kinetics.comment = 'Exact match found for rate rule ({0})'.format(','.join([g.label for g in reaction.template]))
            elif 'Matched reaction' in kinetics.comment:
                # Stems from matching a reaction from a depository
                pass
            else:
                # Estimated (averaged) rate rule
                kinetics.comment = kinetics.comment[kinetics.comment.find('Estimated'):]

        return kinetics, source, entry, isForward
    def printEnlargeSummary(self, newCoreSpecies, newCoreReactions, newEdgeSpecies, newEdgeReactions, reactionsMovedFromEdge=None):
        """
        Output a summary of a model enlargement step to the log. The details of
        the enlargement are passed in the `newCoreSpecies`, `newCoreReactions`,
        `newEdgeSpecies`, and `newEdgeReactions` objects.

        Note: reactions that were moved from the edge are removed from the
        `newCoreReactions` list in place, so they are reported only once.
        """
        logging.info('')
        logging.info('Summary of Model Enlargement')
        logging.info('----------------------------')

        logging.info('Added {0:d} new core species'.format(len(newCoreSpecies)))
        for spec in newCoreSpecies:
            display(spec)
            logging.info('    {0}'.format(spec))

        logging.info('Created {0:d} new edge species'.format(len(newEdgeSpecies)))
        for spec in newEdgeSpecies:
            display(spec)
            logging.info('    {0}'.format(spec))

        if reactionsMovedFromEdge:
            logging.info('Moved {0:d} reactions from edge to core'.format(len(reactionsMovedFromEdge)))
            for rxn in reactionsMovedFromEdge:
                # Find the corresponding entry in newCoreReactions (in either
                # direction), log it, and remove it so it is not double-counted
                # in the "new core reactions" section below.
                for r in newCoreReactions:
                    if ((r.reactants == rxn.reactants and r.products == rxn.products) or
                        (r.products == rxn.reactants and r.reactants == rxn.products)):
                        logging.info('    {0}'.format(r))
                        newCoreReactions.remove(r)
                        break

        logging.info('Added {0:d} new core reactions'.format(len(newCoreReactions)))
        for rxn in newCoreReactions:
            logging.info('    {0}'.format(rxn))

        logging.info('Created {0:d} new edge reactions'.format(len(newEdgeReactions)))
        for rxn in newEdgeReactions:
            logging.info('    {0}'.format(rxn))

        coreSpeciesCount, coreReactionCount, edgeSpeciesCount, edgeReactionCount = self.getModelSize()

        # Output current model size information after enlargement
        logging.info('')
        logging.info('After model enlargement:')
        logging.info('    The model core has {0:d} species and {1:d} reactions'.format(coreSpeciesCount, coreReactionCount))
        logging.info('    The model edge has {0:d} species and {1:d} reactions'.format(edgeSpeciesCount, edgeReactionCount))
        logging.info('')
def addSpeciesToCore(self, spec):
"""
Add a species `spec` to the reaction model core (and remove from edge if
necessary). This function also moves any reactions in the edge that gain
core status as a result of this change in status to the core.
If this are any such reactions, they are returned in a list.
"""
assert spec not in self.core.species, "Tried to add species {0} to core, but it's already there".format(spec.label)
# Add the species to the core
self.core.species.append(spec)
rxnList = []
if spec in self.edge.species:
# If species was in edge, remove it
logging.debug("Removing species {0} from edge.".format(spec))
self.edge.species.remove(spec)
# Search edge for reactions that now contain only core species;
# these belong in the model core and will be moved there
for rxn in self.edge.reactions:
allCore = True
for reactant in rxn.reactants:
if reactant not in self.core.species: allCore = False
for product in rxn.products:
if product not in self.core.species: allCore = False
if allCore: rxnList.append(rxn)
# Move any identified reactions to the core
for rxn in rxnList:
self.addReactionToCore(rxn)
logging.debug("Moving reaction from edge to core: {0}".format(rxn))
return rxnList
def addSpeciesToEdge(self, spec):
"""
Add a species `spec` to the reaction model edge.
"""
self.edge.species.append(spec)
def prune(self, reactionSystems, fluxToleranceKeepInEdge, maximumEdgeSpecies):
"""
Remove species from the model edge based on the simulation results from
the list of `reactionSystems`.
"""
ineligibleSpecies = [] # A list of the species which are not eligible for pruning, for any reason
numCoreSpecies = len(self.core.species)
numEdgeSpecies = len(self.edge.species)
# All edge species that have not existed for more than two enlarge
# iterations are ineligible for pruning
for spec in self.edge.species:
if numCoreSpecies - spec.coreSizeAtCreation <= 2:
ineligibleSpecies.append(spec)
# Get the maximum species rates (and network leak rates)
# across all reaction systems
maxEdgeSpeciesRates = numpy.zeros((numEdgeSpecies), numpy.float64)
for reactionSystem in reactionSystems:
for i in range(numEdgeSpecies):
rate = reactionSystem.maxEdgeSpeciesRates[i]
if maxEdgeSpeciesRates[i] < rate:
maxEdgeSpeciesRates[i] = rate
for i in range(len(self.networkList)):
network = self.networkList[i]
rate = reactionSystem.maxNetworkLeakRates[i]
# Add the fraction of the network leak rate contributed by
# each unexplored species to that species' rate
# This is to ensure we have an overestimate of that species flux
ratios = network.getLeakBranchingRatios(reactionSystem.T.value_si,reactionSystem.P.value_si)
for spec, frac in ratios.iteritems():
index = self.edge.species.index(spec)
maxEdgeSpeciesRates[index] += frac * rate
# Mark any species that is explored in any partial network as ineligible for pruning
for spec in network.explored:
if spec not in ineligibleSpecies:
ineligibleSpecies.append(spec)
# Show the number of ineligible species
logging.info('Having {0} species ineligible to prune'.format(len(ineligibleSpecies)))
logging.info('Having edge species {0}, maximumEdgeSpecies {1}'.format(numEdgeSpecies, maximumEdgeSpecies))
# Sort the edge species rates by index
indices = numpy.argsort(maxEdgeSpeciesRates)
# Determine which species to prune
speciesToPrune = []
pruneDueToRateCounter = 0
for index in indices:
# Remove the species with rates below the pruning tolerance from the model edge
if maxEdgeSpeciesRates[index] < fluxToleranceKeepInEdge and self.edge.species[index] not in ineligibleSpecies:
speciesToPrune.append((index, self.edge.species[index]))
pruneDueToRateCounter += 1
# Keep removing species with the lowest rates until we are below the maximum edge species size
elif numEdgeSpecies - len(speciesToPrune) > maximumEdgeSpecies and self.edge.species[index] not in ineligibleSpecies:
speciesToPrune.append((index, self.edge.species[index]))
else:
continue
logging.info('Having {0} species to prune'.format(len(speciesToPrune)))
# Actually do the pruning
if pruneDueToRateCounter > 0:
logging.info('Pruning {0:d} species whose rates did not exceed the minimum threshold of {1:g}'.format(pruneDueToRateCounter, fluxToleranceKeepInEdge))
for index, spec in speciesToPrune[0:pruneDueToRateCounter]:
logging.info('Pruning species {0:<56}'.format(spec))
logging.debug(' {0:<56} {1:10.4e}'.format(spec, maxEdgeSpeciesRates[index]))
self.removeSpeciesFromEdge(spec)
if len(speciesToPrune) - pruneDueToRateCounter > 0:
logging.info('Pruning {0:d} species to obtain an edge size of {1:d} species'.format(len(speciesToPrune) - pruneDueToRateCounter, maximumEdgeSpecies))
for index, spec in speciesToPrune[pruneDueToRateCounter:]:
logging.info('Pruning species {0:<56}'.format(spec))
logging.debug(' {0:<56} {1:10.4e}'.format(spec, maxEdgeSpeciesRates[index]))
self.removeSpeciesFromEdge(spec)
# Delete any networks that became empty as a result of pruning
if self.pressureDependence:
networksToDelete = []
if len(network.pathReactions) == 0 and len(network.netReactions) == 0:
networksToDelete.append(network)
if len(networksToDelete) > 0:
logging.info('Deleting {0:d} empty pressure-dependent reaction networks'.format(len(networksToDelete)))
for network in networksToDelete:
logging.debug(' Deleting empty pressure dependent reaction network #{0:d}'.format(network.index))
source = network.source
nets_with_this_source = self.networkDict[source]
nets_with_this_source.remove(network)
if not nets_with_this_source:
del(self.networkDict[source])
self.networkList.remove(network)
logging.info('')
def removeSpeciesFromEdge(self, spec):
"""
Remove species `spec` from the reaction model edge.
"""
# remove the species
self.edge.species.remove(spec)
# identify any reactions it's involved in
rxnList = []
for rxn in self.edge.reactions:
if spec in rxn.reactants or spec in rxn.products:
rxnList.append(rxn)
# remove those reactions
for rxn in rxnList:
self.edge.reactions.remove(rxn)
# Remove the species from any unirxn networks it is in
if self.pressureDependence:
for network in self.networkList:
# Delete all path reactions involving the species
rxnList = []
for rxn in network.pathReactions:
if spec in rxn.reactants or spec in rxn.products:
rxnList.append(rxn)
if len(rxnList) > 0:
for rxn in rxnList:
network.pathReactions.remove(rxn)
# Delete all net reactions involving the species
rxnList = []
for rxn in network.netReactions:
if spec in rxn.reactants or spec in rxn.products:
rxnList.append(rxn)
for rxn in rxnList:
network.netReactions.remove(rxn)
# Recompute the isomers, reactants, and products for this network
network.updateConfigurations(self)
# Remove from the global list of reactions
# also remove it from the global list of reactions
for family in self.reactionDict:
if spec in self.reactionDict[family]:
del self.reactionDict[family][spec]
for reactant1 in self.reactionDict[family]:
if spec in self.reactionDict[family][reactant1]:
del self.reactionDict[family][reactant1][spec]
# remove from the global list of species, to free memory
formula = spec.molecule[0].getFormula()
self.speciesDict[formula].remove(spec)
if spec in self.speciesCache:
self.speciesCache.remove(spec)
self.speciesCache.append(None)
def addReactionToCore(self, rxn):
"""
Add a reaction `rxn` to the reaction model core (and remove from edge if
necessary). This function assumes `rxn` has already been checked to
ensure it is supposed to be a core reaction (i.e. all of its reactants
AND all of its products are in the list of core species).
"""
if rxn not in self.core.reactions:
self.core.reactions.append(rxn)
if rxn in self.edge.reactions:
self.edge.reactions.remove(rxn)
def addReactionToEdge(self, rxn):
"""
Add a reaction `rxn` to the reaction model edge. This function assumes
`rxn` has already been checked to ensure it is supposed to be an edge
reaction (i.e. all of its reactants OR all of its products are in the
list of core species, and the others are in either the core or the
edge).
"""
if rxn not in self.edge.reactions:
self.edge.reactions.append(rxn)
def getModelSize(self):
"""
Return the numbers of species and reactions in the model core and edge.
Note that this is not necessarily equal to the lengths of the
corresponding species and reaction lists.
"""
coreSpeciesCount = len(self.core.species)
coreReactionsCount = len(self.core.reactions)
edgeSpeciesCount = len(self.edge.species)
edgeReactionsCount = len(self.edge.reactions)
return (coreSpeciesCount, coreReactionsCount, edgeSpeciesCount, edgeReactionsCount)
def getLists(self):
"""
Return lists of all of the species and reactions in the core and the
edge.
"""
speciesList = []
speciesList.extend(self.core.species)
speciesList.extend(self.edge.species)
reactionList = []
reactionList.extend(self.core.reactions)
reactionList.extend(self.edge.reactions)
return speciesList, reactionList
def getStoichiometryMatrix(self):
"""
Return the stoichiometry matrix for all generated species and reactions.
The id of each species and reaction is the corresponding row and column,
respectively, in the matrix.
"""
speciesList, reactionList = self.getLists()
from scipy import sparse
stoichiometry = sparse.dok_matrix((self.speciesCounter, self.reactionCounter), float)
for rxn in reactionList:
j = rxn.index - 1
specList = rxn.reactants[:]; specList.extend(rxn.products)
for spec in specList:
i = spec.index - 1
nu = rxn.getStoichiometricCoefficient(spec)
if nu != 0: stoichiometry[i,j] = nu
return stoichiometry.tocsr()
def getReactionRates(self, T, P, Ci):
"""
Return an array of reaction rates for each reaction in the model core
and edge. The id of the reaction is the index into the vector.
"""
speciesList, reactionList = self.getLists()
rxnRate = numpy.zeros(self.reactionCounter, float)
for rxn in reactionList:
j = rxn.index - 1
rxnRate[j] = rxn.getRate(T, P, Ci)
return rxnRate
    def addSeedMechanismToCore(self, seedMechanism, react=False):
        """
        Add all species and reactions from `seedMechanism`, a
        :class:`KineticsPrimaryDatabase` object, to the model core. If `react`
        is ``True``, then reactions will also be generated between the seed
        species. For large seed mechanisms this can be prohibitively expensive,
        so it is not done by default.

        Raises :class:`ForbiddenStructureException` when a seed species is
        globally forbidden or fails the species constraints and has not been
        explicitly allowed.
        """
        if react: raise NotImplementedError("react=True doesn't work yet")
        database = rmgpy.data.rmg.database
        self.newReactionList = []; self.newSpeciesList = []
        # Remember the current core size so the summary at the end can report
        # only what this call added.
        numOldCoreSpecies = len(self.core.species)
        numOldCoreReactions = len(self.core.reactions)
        logging.info('Adding seed mechanism {0} to model core...'.format(seedMechanism))
        # Resolve the library name into the loaded kinetics library object.
        seedMechanism = database.kinetics.libraries[seedMechanism]
        # Register every library entry as a reaction; makeNewReaction also
        # registers any previously-unseen participant species.
        for entry in seedMechanism.entries.values():
            rxn = LibraryReaction(reactants=entry.item.reactants[:], products=entry.item.products[:], library=seedMechanism, kinetics=entry.data)
            r, isNew = self.makeNewReaction(rxn) # updates self.newSpeciesList and self.newReactionlist
        # Perform species constraints and forbidden species checks
        for spec in self.newSpeciesList:
            if database.forbiddenStructures.isMoleculeForbidden(spec.molecule[0]):
                if 'allowed' in self.speciesConstraints and 'seed mechanisms' in self.speciesConstraints['allowed']:
                    logging.warning("Species {0} from seed mechanism {1} is globally forbidden. It will behave as an inert unless found in a seed mechanism or reaction library.".format(spec.label, seedMechanism.label))
                else:
                    raise ForbiddenStructureException("Species {0} from seed mechanism {1} is globally forbidden. You may explicitly allow it, but it will remain inert unless found in a seed mechanism or reaction library.".format(spec.label, seedMechanism.label))
            if self.failsSpeciesConstraints(spec):
                if 'allowed' in self.speciesConstraints and 'seed mechanisms' in self.speciesConstraints['allowed']:
                    self.speciesConstraints['explicitlyAllowedMolecules'].extend(spec.molecule)
                else:
                    raise ForbiddenStructureException("Species constraints forbids species {0} from seed mechanism {1}. Please reformulate constraints, remove the species, or explicitly allow it.".format(spec.label, seedMechanism.label))
        # Generate thermo/transport data for each new species and promote it
        # straight into the core.
        for spec in self.newSpeciesList:
            if spec.reactive: spec.generateThermoData(database, quantumMechanics=self.quantumMechanics)
            spec.generateTransportData(database)
            self.addSpeciesToCore(spec)
        for rxn in self.newReactionList:
            if self.pressureDependence and rxn.isUnimolecular():
                # If this is going to be run through pressure dependence code,
                # we need to make sure the barrier is positive.
                # ...but are Seed Mechanisms run through PDep? Perhaps not.
                for spec in itertools.chain(rxn.reactants, rxn.products):
                    if spec.thermo is None:
                        spec.generateThermoData(database, quantumMechanics=self.quantumMechanics)
                rxn.fixBarrierHeight(forcePositive=True)
            self.addReactionToCore(rxn)
        # Check we didn't introduce unmarked duplicates
        self.markChemkinDuplicates()
        # Log what this call added to the core (nothing goes to the edge here).
        self.printEnlargeSummary(
            newCoreSpecies=self.core.species[numOldCoreSpecies:],
            newCoreReactions=self.core.reactions[numOldCoreReactions:],
            newEdgeSpecies=[],
            newEdgeReactions=[],
        )
    def addReactionLibraryToEdge(self, reactionLibrary):
        """
        Add all species and reactions from `reactionLibrary`, a
        :class:`KineticsPrimaryDatabase` object, to the model edge.

        Raises :class:`ForbiddenStructureException` when a library species is
        globally forbidden or fails the species constraints and has not been
        explicitly allowed.
        """
        database = rmgpy.data.rmg.database
        self.newReactionList = []
        self.newSpeciesList = []
        # Remember the current edge size so the summary at the end can report
        # only what this call added.
        numOldEdgeSpecies = len(self.edge.species)
        numOldEdgeReactions = len(self.edge.reactions)
        logging.info('Adding reaction library {0} to model edge...'.format(reactionLibrary))
        # Resolve the library name into the loaded kinetics library object.
        reactionLibrary = database.kinetics.libraries[reactionLibrary]
        # Register every library entry as a reaction; makeNewReaction also
        # registers any previously-unseen participant species.
        for entry in reactionLibrary.entries.values():
            rxn = LibraryReaction(reactants=entry.item.reactants[:], products=entry.item.products[:], library=reactionLibrary, kinetics=entry.data)
            r, isNew = self.makeNewReaction(rxn) # updates self.newSpeciesList and self.newReactionlist
            if not isNew: logging.info("This library reaction was not new: {0}".format(rxn))
        # Perform species constraints and forbidden species checks
        for spec in self.newSpeciesList:
            if database.forbiddenStructures.isMoleculeForbidden(spec.molecule[0]):
                if 'allowed' in self.speciesConstraints and 'reaction libraries' in self.speciesConstraints['allowed']:
                    logging.warning("Species {0} from reaction library {1} is globally forbidden. It will behave as an inert unless found in a seed mechanism or reaction library.".format(spec.label, reactionLibrary.label))
                else:
                    raise ForbiddenStructureException("Species {0} from reaction library {1} is globally forbidden. You may explicitly allow it, but it will remain inert unless found in a seed mechanism or reaction library.".format(spec.label, reactionLibrary.label))
            if self.failsSpeciesConstraints(spec):
                if 'allowed' in self.speciesConstraints and 'reaction libraries' in self.speciesConstraints['allowed']:
                    self.speciesConstraints['explicitlyAllowedMolecules'].extend(spec.molecule)
                else:
                    raise ForbiddenStructureException("Species constraints forbids species {0} from reaction library {1}. Please reformulate constraints, remove the species, or explicitly allow it.".format(spec.label, reactionLibrary.label))
        # Generate thermo/transport data for each new species and add it to
        # the edge (not the core).
        for spec in self.newSpeciesList:
            if spec.reactive: spec.generateThermoData(database, quantumMechanics=self.quantumMechanics)
            spec.generateTransportData(database)
            self.addSpeciesToEdge(spec)
        for rxn in self.newReactionList:
            # Note that we haven't actually evaluated any fluxes at this point
            # Instead, we remove the comment below if the reaction is moved to
            # the core later in the mechanism generation
            self.addReactionToEdge(rxn)
        # Log what this call added to the edge (nothing goes to the core here).
        self.printEnlargeSummary(
            newCoreSpecies=[],
            newCoreReactions=[],
            newEdgeSpecies=self.edge.species[numOldEdgeSpecies:],
            newEdgeReactions=self.edge.reactions[numOldEdgeReactions:],
        )
    def addReactionLibraryToOutput(self, reactionLib):
        """
        Add all species and reactions from `reactionLibrary`, a
        :class:`KineticsPrimaryDatabase` object, to the output.
        This does not bring any of the reactions or species into the core itself.
        """
        logging.info('Adding reaction library {0} to output file...'.format(reactionLib))
        database = rmgpy.data.rmg.database
        # Resolve the library name into the loaded kinetics library object.
        reactionLibrary = database.kinetics.libraries[reactionLib]
        for entry in reactionLibrary.entries.values():
            # Build a LibraryReaction and map its reactants/products onto the
            # model's canonical Species objects.
            rxn = LibraryReaction(reactants=entry.item.reactants[:], products=entry.item.products[:], library=reactionLibrary, kinetics=entry.data)
            rxn.reactants = [self.makeNewSpecies(reactant)[0] for reactant in rxn.reactants]
            rxn.products = [self.makeNewSpecies(product)[0] for product in rxn.products]
            # Any participant not already in the core goes onto the extra
            # output species list so it still appears in the Chemkin output.
            for species in rxn.reactants:
                if species not in self.core.species and species not in self.outputSpeciesList:
                    self.outputSpeciesList.append(species)
            for species in rxn.products:
                if species not in self.core.species and species not in self.outputSpeciesList:
                    self.outputSpeciesList.append(species)
            # Reaction library was already on the edge, so we just need to get right label
            rxn = self.checkForExistingReaction(rxn)[1]
            if rxn in self.core.reactions:
                # Core reactions need no explanatory kinetics comment.
                rxn.kinetics.comment = ''
                pass
            else:
                rxn.kinetics.comment = ("RMG did not find reaction rate to be high enough to be included in model core.")
            self.outputReactionList.append(rxn)
        # Ensure duplicate flags stay correct after adding output reactions.
        self.markChemkinDuplicates()
def addReactionToUnimolecularNetworks(self, newReaction, newSpecies, network=None):
"""
Given a newly-created :class:`Reaction` object `newReaction`, update the
corresponding unimolecular reaction network. If no network exists, a new
one is created. If the new reaction is an isomerization that connects two
existing networks, the two networks are merged. This function is called
whenever a new high-pressure limit edge reaction is created. Returns the
network containing the new reaction.
"""
assert isinstance(newSpecies, Species)
# Put the reaction in the direction in which the new species is in the reactants
if newSpecies in newReaction.reactants:
reactants = newReaction.reactants[:]
products = newReaction.products[:]
else:
reactants = newReaction.products[:]
products = newReaction.products[:]
reactants.sort()
products.sort()
source = tuple(reactants)
# Only search for a network if we don't specify it as a parameter
if network is None:
if len(reactants) == 1:
# Find the network containing the reactant as the source
try:
networks = self.networkDict[source]
assert len(networks) == 1
network = networks[0]
except KeyError:
pass
elif len(reactants) > 1:
# Find the network containing the reactants as the source AND the
# product as an explored isomer
try:
networks = self.networkDict[source]
for n in networks:
if products[0] in n.explored:
assert network is None
network = n
except KeyError:
pass
else:
return None
# If no suitable network exists, create a new one
if network is None:
self.networkCount += 1
network = PDepNetwork(index=self.networkCount, source=reactants[:])
# should the source passed to PDepNetwork constuctor be a tuple not a list? that's what is used in networkDict
try:
self.networkDict[source].append(network)
except KeyError:
self.networkDict[source] = [network]
self.networkList.append(network)
# Add the path reaction to that network
network.addPathReaction(newReaction, newSpecies)
# Return the network that the reaction was added to
return network
    def updateUnimolecularReactionNetworks(self, database):
        """
        Iterate through all of the currently-existing unimolecular reaction
        networks, updating those that have been marked as invalid. In each update,
        the phenomonological rate coefficients :math:`k(T,P)` are computed for
        each net reaction in the network, and the resulting reactions added or
        updated.
        """
        # Merge networks if necessary
        # Two partial networks having the same source and containing one or
        # more explored isomers in common must be merged together to avoid
        # double-counting of rates
        for source, networks in self.networkDict.iteritems():
            networkCount = len(networks)
            for index0, network0 in enumerate(networks):
                index = index0 + 1
                # Compare network0 against every later network sharing this
                # source; `index` only advances when no merge happened.
                while index < networkCount:
                    found = False
                    network = networks[index]
                    if network0.source == network.source:
                        # The networks contain the same source, but do they contain any common included isomers (other than the source)?
                        for isomer in network0.explored:
                            if isomer != network.source and isomer in network.explored:
                                # The networks contain an included isomer in common, so we need to merge them
                                found = True
                                break
                    if found:
                        # The networks contain the same source and one or more common included isomers
                        # Therefore they need to be merged together
                        logging.info('Merging PDepNetwork #{0:d} and PDepNetwork #{1:d}'.format(network0.index, network.index))
                        network0.merge(network)
                        networks.remove(network)
                        self.networkList.remove(network)
                        networkCount -= 1
                    else:
                        index += 1
        count = sum([1 for network in self.networkList if not network.valid and not (len(network.explored) == 0 and len(network.source) > 1)])
        logging.info('Updating {0:d} modified unimolecular reaction networks...'.format(count))
        # Iterate over all the networks, updating the invalid ones as necessary
        # self = reactionModel object
        updatedNetworks = []
        for network in self.networkList:
            if not network.valid:
                network.update(self, database, self.pressureDependence)
                updatedNetworks.append(network)
        # PDepReaction objects generated from partial networks are irreversible
        # However, it makes more sense to have reversible reactions in the core
        # Thus we mark PDepReaction objects as reversible and remove the reverse
        # direction from the list of core reactions
        # Note that well-skipping reactions may not have a reverse if the well
        # that they skip over is not itself in the core
        for network in updatedNetworks:
            for reaction in network.netReactions:
                try:
                    index = self.core.reactions.index(reaction)
                except ValueError:
                    # This net reaction is not in the core; nothing to merge.
                    continue
                for index2, reaction2 in enumerate(self.core.reactions):
                    if isinstance(reaction2, PDepReaction) and reaction.reactants == reaction2.products and reaction.products == reaction2.reactants:
                        # We've found the PDepReaction for the reverse direction
                        dGrxn = reaction.getFreeEnergyOfReaction(300.)
                        # kf/kr and kf2/kr2 are the forward/reverse rate
                        # coefficients of each direction at 1000 K, 1 bar.
                        kf = reaction.getRateCoefficient(1000,1e5)
                        kr = reaction.getRateCoefficient(1000,1e5) / reaction.getEquilibriumConstant(1000)
                        kf2 = reaction2.getRateCoefficient(1000,1e5) / reaction2.getEquilibriumConstant(1000)
                        kr2 = reaction2.getRateCoefficient(1000,1e5)
                        if kf / kf2 < 0.5 or kf / kf2 > 2.0:
                            # Most pairs of reactions should satisfy thermodynamic consistency (or at least be "close")
                            # Warn about the ones that aren't close (but don't abort)
                            logging.warning('Forward and reverse PDepReactions for reaction {0!s} generated from networks {1:d} and {2:d} do not satisfy thermodynamic consistency.'.format(reaction, reaction.network.index, reaction2.network.index))
                            logging.warning('{0!s}:'.format(reaction))
                            logging.warning('{0:.2e} {1:.2e}:'.format(kf, kf2))
                            logging.warning('{0!s}:'.format(reaction2))
                            logging.warning('{0:.2e} {1:.2e}:'.format(kr, kr2))
                        # Keep the exergonic direction
                        # (dGrxn < 0 means the direction written in `reaction`
                        # is exergonic at 300 K)
                        keepFirst = dGrxn < 0
                        # Delete the PDepReaction that we aren't keeping
                        if keepFirst:
                            self.core.reactions.remove(reaction2)
                            reaction.reversible = True
                        else:
                            # Keep reaction2, but move it into reaction's slot
                            # so the core ordering is preserved.
                            self.core.reactions.remove(reaction)
                            self.core.reactions.remove(reaction2)
                            self.core.reactions.insert(index, reaction2)
                            reaction2.reversible = True
                        # There should be only one reverse, so we can stop searching once we've found it
                        break
                else:
                    # No reverse direction found in the core; simply mark the
                    # reaction itself as reversible.
                    reaction.reversible = True
def loadSeedMechanism(self, path):
"""
Loads a seed mechanism from the folder indicated by `path` into the
core-edge reaction model.
"""
import os.path
import quantities as pq
import data
import thermo.data
import kinetics.data
import reaction
# Load the species data from the file species.txt
# This file has the format of a standard RMG dictionary
d = data.Dictionary()
d.load(os.path.join(path, 'species.txt'))
d.toStructure(addH=True)
# Load the thermo data from the file thermo.txt
# This file has the format of a standard RMG thermo library
thermoData = thermo.data.ThermoDatabase()
thermoData.load(os.path.join(path, 'species.txt'), '', os.path.join(path, 'thermo.txt'))
# Populate the main primary thermo library with this thermo data
# This will overwrite keys (but not values), so the order that the
# seed mechanisms are loaded matters!
for key, value in d.iteritems():
thermo.data.thermoDatabase.primaryDatabase.dictionary[key] = value
for key, value in thermoData.library.iteritems():
thermo.data.thermoDatabase.primaryDatabase.library[key] = value
# Create new species based on items in species.txt
seedSpeciesDict = {}; seedSpeciesList = []
for label, struct in d.iteritems():
spec, isNew = species.makeNewSpecies(struct, label, reactive=True)
seedSpeciesDict[label] = spec
seedSpeciesList.append(spec)
# Load the reactions from the file reaction.txt
seedReactionList = []
f = open(os.path.join(path, 'reactions.txt'), 'r')
for line in f:
line = data.removeCommentFromLine(line)
line.strip()
if len(line) > 0:
items = line.split()
if len(items) > 0:
rxn = items[0:-6]
# Extract reactants and products
if '<=>' in rxn: arrow = rxn.index('<=>')
elif '=>' in rxn: arrow = rxn.index('=>')
else: raise IOError('No arrow found in reaction equation from line {0}'.format(line))
reactants = rxn[0:arrow:2]
products = rxn[arrow+1::2]
# Remove third body 'M' if present
thirdBody = False
if 'M' in reactants and 'M' in products:
thirdBody = True
reactants.remove('M')
products.remove('M')
# Convert strings to species objects
reactants = [seedSpeciesDict[r] for r in reactants]
products = [seedSpeciesDict[r] for r in products]
reactants.sort()
products.sort()
# Process Arrhenius parameters
order = len(reactants)
if (thirdBody): order += 1
Aunits = 'cm^{0:d}/(mol^{1:d}*s)'.format(3*(order-1), order-1)
A = float(pq.Quantity(float(items[-6]), Aunits).simplified)
n = float(items[-5]) # dimensionless
Ea = float(pq.Quantity(float(items[-4]), 'cal/mol').simplified)
kin = [kinetics.model.Arrhenius(A=A, n=n, Ea=Ea)]
# Create reaction object and add to list
rxn = reaction.Reaction(id=0, reactants=reactants, products=products, family='seed', kinetics=kin, thirdBody=thirdBody)
rxn.reverse = reaction.Reaction(id=0, reactants=products, products=reactants, family='seed', kinetics=None, thirdBody=thirdBody)
rxn.reverse.reverse = rxn
reaction.processNewReaction(rxn)
seedReactionList.append(rxn)
f.close()
# Add species to core
for spec in seedSpeciesList:
self.addSpeciesToCore(spec)
# Add reactions to core
for rxn in seedReactionList:
self.addReactionToCore(rxn)
def markChemkinDuplicates(self):
"""
Check that all reactions that will appear the chemkin output have been checked as duplicates.
Call this if you've done something that may have introduced undetected duplicate reactions,
like add a reaction library or seed mechanism.
Anything added via the :meth:`expand` method should already be detected.
"""
from rmgpy.chemkin import markDuplicateReactions
rxnList = self.core.reactions + self.outputReactionList
markDuplicateReactions(rxnList)
    def saveChemkinFile(self, path, verbose_path, dictionaryPath=None, transportPath=None, saveEdgeSpecies=False):
        """
        Save a Chemkin file for the current model as well as any desired output
        species and reactions to `path`.

        `verbose_path` receives the annotated (verbose) Chemkin file.
        When given, `dictionaryPath` and `transportPath` receive the species
        dictionary and transport data files respectively. If `saveEdgeSpecies`
        is ``True``, the edge species/reactions are written as well.
        """
        from rmgpy.chemkin import saveChemkinFile, saveSpeciesDictionary, saveTransportFile
        # Core species/reactions plus any extra output entries.
        speciesList = self.core.species + self.outputSpeciesList
        rxnList = self.core.reactions + self.outputReactionList
        saveChemkinFile(path, speciesList, rxnList, verbose = False, checkForDuplicates=False) # We should already have marked everything as duplicates by now
        logging.info('Saving current model to verbose Chemkin file...')
        saveChemkinFile(verbose_path, speciesList, rxnList, verbose = True, checkForDuplicates=False)
        if dictionaryPath:
            saveSpeciesDictionary(dictionaryPath, speciesList)
        if transportPath:
            saveTransportFile(transportPath, speciesList)
        if saveEdgeSpecies == True:
            # NOTE(review): the edge output below reuses `path`, `verbose_path`
            # and `dictionaryPath`, overwriting the core files just written --
            # confirm whether separate edge file paths were intended.
            speciesList = self.edge.species + self.outputSpeciesList
            rxnList = self.edge.reactions + self.outputReactionList
            saveChemkinFile(path, speciesList, rxnList, verbose = False, checkForDuplicates=False)
            logging.info('Saving current edge to verbose Chemkin file...')
            saveChemkinFile(verbose_path, speciesList, rxnList, verbose = True, checkForDuplicates=False)
            if dictionaryPath:
                saveSpeciesDictionary(dictionaryPath, speciesList)
def failsSpeciesConstraints(self, species):
"""
Pass in either a `Species` or `Molecule` object and checks whether it passes
the speciesConstraints set by the user. If not, returns `True` for failing speciesConstraints.
"""
explicitlyAllowedMolecules = self.speciesConstraints.get('explicitlyAllowedMolecules', [])
maxCarbonAtoms = self.speciesConstraints.get('maximumCarbonAtoms', 1000000)
maxHydrogenAtoms = self.speciesConstraints.get('maximumHydrogenAtoms', 1000000)
maxOxygenAtoms = self.speciesConstraints.get('maximumOxygenAtoms', 1000000)
maxNitrogenAtoms = self.speciesConstraints.get('maximumNitrogenAtoms', 1000000)
maxSiliconAtoms = self.speciesConstraints.get('maximumSiliconAtoms', 1000000)
maxSulfurAtoms = self.speciesConstraints.get('maximumSulfurAtoms', 1000000)
maxHeavyAtoms = self.speciesConstraints.get('maximumHeavyAtoms', 1000000)
maxRadicals = self.speciesConstraints.get('maximumRadicalElectrons', 1000000)
if isinstance(species, rmgpy.species.Species):
struct = species.molecule[0]
else:
# expects a molecule here
struct = species
for molecule in explicitlyAllowedMolecules:
if struct.isIsomorphic(molecule):
return False
H = struct.getNumAtoms('H')
if struct.getNumAtoms('C') > maxCarbonAtoms:
return True
if H > maxHydrogenAtoms:
return True
if struct.getNumAtoms('O') > maxOxygenAtoms:
return True
if struct.getNumAtoms('N') > maxNitrogenAtoms:
return True
if struct.getNumAtoms('Si') > maxSiliconAtoms:
return True
if struct.getNumAtoms('S') > maxSulfurAtoms:
return True
if len(struct.atoms) - H > maxHeavyAtoms:
return True
if (struct.getNumberOfRadicalElectrons() > maxRadicals):
return True
return False
|
KEHANG/RMG-Py
|
rmgpy/rmg/model.py
|
Python
|
mit
| 87,614
|
from fabric import api as fab
from contextlib import contextmanager
@contextmanager
def with_vagrant():
    """Context manager applying Fabric settings for the local Vagrant VM,
    reached over the forwarded SSH port 2222 with the default Vagrant key."""
    with fab.settings(user="vagrant",host_string="127.0.0.1:2222",key_filename=".vagrant/machines/minecraft/virtualbox/private_key"):
        yield
def ping(ip):
    """Ping `ip` four times from inside the Vagrant VM and return the output."""
    with with_vagrant():
        return fab.run('ping -c 4 {}'.format(ip))
def save():
    """Trigger a Minecraft server backup, then download all backup files
    from the VM into the local Dropbox folder."""
    with with_vagrant():
        fab.sudo('/etc/init.d/minecraft backup')
        # fab.get copies remote files down to the local machine.
        fab.get(remote_path='/srv/minecraft-server/backups/*', local_path="/Users/e003070/Dropbox/minecraft_backups")
def restore():
    """Upload locally-stored backups from the Dropbox folder back onto the
    Minecraft server's backups directory."""
    with with_vagrant():
        # fab.put copies local files up to the remote path.
        fab.put(remote_path='/srv/minecraft-server/backups/', local_path="/Users/e003070/Dropbox/minecraft_backups")
|
ekozlowski/ansible-minecraft
|
fabfile.py
|
Python
|
mit
| 717
|
# -*- encoding: utf8 -*-
# A daemon to keep SSH forwarding connected
from __future__ import print_function, absolute_import
import os
import sys
import time
import socket
import logging
class Daemon(object):
    """Keep an SSH port-forwarding tunnel alive by probing the local
    forwarded port periodically and respawning ssh whenever it stops
    accepting connections."""
    def __init__(self):
        # Seconds to sleep between connectivity checks.
        self.heartbeat = 50
    def run(self):
        """Daemonize, then loop forever: reconnect whenever the check fails."""
        logging.basicConfig(filename='daemon.log')
        logging.error('daemon started')
        self.daemonize()
        while True:
            if not self.check_connection():
                self.reconnect()
                logging.warn('reconnecting')
            time.sleep(self.heartbeat)
    def check_connection(self):
        """Return True if localhost:3366 (the forwarded port) accepts a TCP
        connection, False otherwise."""
        c = socket.socket()
        try:
            c.connect(('localhost', 3366))
            c.close()
            return True
        except socket.error:
            return False
    def daemonize(self):
        # Single fork: the parent reaps (non-blocking) and exits, leaving the
        # child running in the background.
        # NOTE(review): this is not a full double-fork daemonization (no
        # setsid/chdir/umask) -- confirm that is acceptable here.
        pid = os.fork()
        if pid:
            os.waitpid(pid, os.WNOHANG)
            sys.exit(0)
        return
    def reconnect(self):
        """Fork and exec ssh to re-establish the local 3366 -> remote 3306
        tunnel, blocking in the parent until the ssh process exits."""
        pid = os.fork()
        if pid == 0: # child
            # execlp replaces the child image; on success it never returns.
            err = os.execlp('/usr/bin/ssh', 'ssh', '-i',
                            '/home/xu/.ssh/id_rsa', '-L',
                            '3366:127.0.0.1:3306', '-p', '42022', 'xu@abc.com')
            if err:
                logging.error("error to execlp")
                sys.exit(1)
        elif pid > 0:
            os.waitpid(pid, 0)
        else:
            logging.error('error to fork')
            sys.exit(2)
# Script entry point: start the forwarding daemon.
if __name__ == '__main__':
    Daemon().run()
|
dlutxx/memo
|
python/daemon.py
|
Python
|
mit
| 1,497
|
# XPath fragments used to locate grant-document links on mass.gov EOPSS pages.
UL_CATEGORY_LI = '//ul[@class="category"]/li'
H2_A_TITLELINK = './h2/a[@class="titlelink"]'
SPAN_A_TITLELINK = './span/a[@class="titlelink"]'
DIV_BODYFIELD_P = '//div[contains(@class,"bodyfield")]/p'
# Common [section xpath, document-link xpath] pairings reused below.
CATEGORY_H2_XPATH = [ UL_CATEGORY_LI, H2_A_TITLELINK ]
BODYFIELD_SPAN_XPATH = [ DIV_BODYFIELD_P, SPAN_A_TITLELINK ]
"""Mapping of relative URL (for EOPSS pages) to the xpath needed
to extract documents (1st xpath for section, 2nd xpath for document link)
"""
MASSGOV_DICT = {
    'homeland-sec/grants/docs/':
    [
        UL_CATEGORY_LI,
        './h2/span/a[@class="titlelink"]'
    ],
    'homeland-sec/grants/hs-grant-guidance-and-policies.html':
    BODYFIELD_SPAN_XPATH,
    'homeland-sec/grants/standard-documents.html':
    [
        '//div[contains(@class,"bodyfield")]/ul/li',
        SPAN_A_TITLELINK
    ],
    'law-enforce/grants/': CATEGORY_H2_XPATH,
    'law-enforce/grants/2017-muni-public-safety-staffing-grant.html':
    BODYFIELD_SPAN_XPATH,
    'law-enforce/grants/le-grants-public-records.html':
    BODYFIELD_SPAN_XPATH,
    'justice-and-prev/grants/': CATEGORY_H2_XPATH,
    'justice-and-prev/grants/bgp/': CATEGORY_H2_XPATH,
    'hwy-safety/grants/': CATEGORY_H2_XPATH,
    'hwy-safety/grants/ffy-2017-traffic-enforcement-grant-program.html':
    BODYFIELD_SPAN_XPATH,
    'hwy-safety/grants/ffy2017-hsd-grant-opportunities.html':
    BODYFIELD_SPAN_XPATH,
    'hwy-safety/grants/ffy-2017-step.html': BODYFIELD_SPAN_XPATH,
    'hwy-safety/grants/highway-safety-grants-public-records.html':
    BODYFIELD_SPAN_XPATH
}
|
RagtagOpen/bidwire
|
bidwire/scrapers/massgov/url_scraper_dict.py
|
Python
|
mit
| 1,717
|
from flask_wtf import Form
from flask_wtf.file import FileRequired, FileAllowed, FileField
from wtforms import StringField, BooleanField, PasswordField, TextAreaField
from wtforms.validators import DataRequired, Email, Length
class SignUpForm(Form):
    """Registration form: username, email, and real name.
    (The password field is currently commented out.)"""
    username = StringField('username', validators=[DataRequired(), Length(max=64)])
    # password = PasswordField('password', validators=[DataRequired(), Length(max=50)])
    email = StringField('email', validators=[DataRequired(), Email(), Length(max=120)])
    first_name = StringField('first_name', validators=[DataRequired(), Length(max=50)])
    last_name = StringField('last_name', validators=[DataRequired(), Length(max=50)])
class LoginForm(Form):
    """Login form: username/password credentials plus a remember-me flag."""
    username = StringField('username', validators=[DataRequired(), Length(max=50)])
    password = PasswordField('password', validators=[DataRequired(), Length(max=50)])
    remember_me = BooleanField('remember_me', default=False)
class PostForm(Form):
    """Form for composing a post; the body text is required."""
    content = TextAreaField('content', validators=[DataRequired()])
class UploadPostForm(Form):
    """Form for uploading a post as a Markdown (.md) file, with an option to
    overwrite an existing post."""
    file = FileField('post', validators=[FileRequired(), FileAllowed(['md'], 'Only Markdown files!')])
    overwrite = BooleanField('overwrite', default=False)
|
andersbogsnes/blog
|
app/forms.py
|
Python
|
mit
| 1,224
|
from .fb2_parser import FB2NoteParser
from .pdf_parser import PDFNoteParser
from .stat_parser import StatsAccessor
__all__ = ("FB2NoteParser", "PDFNoteParser", "StatsAccessor")
|
MrLokans/MoonReader_tools
|
moonreader_tools/parsers/__init__.py
|
Python
|
mit
| 178
|
# /ciscripts/check/python/__init__.py
#
# Module loader file for /ciscripts/check/python.
#
# See /LICENCE.md for Copyright information
"""Module loader file for /ciscripts/check/python."""
|
polysquare/polysquare-ci-scripts
|
ciscripts/check/python/__init__.py
|
Python
|
mit
| 190
|
from merc.actor import Actor
from merc.collision import Collision
from merc.player import Player
from pprint import pprint
import sys
import json
import numpy
import itertools
from PIL import Image, ImageDraw, ImageColor
class Game:
NUM_TWEENS = 10
def __init__(self, data):
"""
`data` should contain the JSON output of Octane
"""
self.__dict__ = data
self.frame = None
self.tween = 0
self.seconds_remaining = 0
self.actors = {}
self.players = {}
self.ball_actor = None
self.grouped_actors = {}
def processFrames(self):
"""
Step through the frames one by one. Build the actors, update the game state,
link the actors, and generate stats.
"""
for frame in self.Frames:
self.frame = frame
for id, data in frame['Spawned'].items():
self.actors[id] = Actor(id, data)
for id, data in frame['Updated'].items():
self.actors[id].update(data, self.frame['Number'])
self.updateState()
self.linkActors()
#self.checkCollisions()
def updateState(self):
"""
Update the game state. Creates a sort of cache to help find commonly used stuff.
"""
self.ball_actor = None
self.grouped_actors = {}
for actor in self.actors.values():
actor_class = actor.getClass()
if actor_class == 'TAGame.GameEvent_Soccar_TA':
# shortcut for the time remaining
s = actor.getProp('TAGame.GameEvent_Soccar_TA:SecondsRemaining', -1)
if s >= 0:
self.seconds_remaining = s
elif actor_class == 'TAGame.Ball_TA':
# shortcut to find the ball actor
self.ball_actor = actor
else:
# group similar actors together
if not actor_class in self.grouped_actors:
self.grouped_actors[actor_class] = []
self.grouped_actors[actor_class].append(actor)
def linkActors(self):
"""
Some actors have relationships with each other, so we set those relationships here.
"""
'''
components -> car -> pri -> team
'''
# link pri -> team
if 'TAGame.PRI_TA' in self.grouped_actors:
for pri_actor in self.grouped_actors['TAGame.PRI_TA']:
if hasattr(pri_actor, 'team'):
continue
team_prop = pri_actor.getProp('Engine.PlayerReplicationInfo:Team')
if not team_prop:
continue
pri_actor.team = self.findActor(team_prop[1])
# link components to car
components = [
'TAGame.CarComponent_Boost_TA',
'TAGame.CarComponent_Jump_TA',
'TAGame.CarComponent_DoubleJump_TA',
'TAGame.CarComponent_Dodge_TA',
'TAGame.CarComponent_FlipCar_TA',
]
for component in components:
if component in self.grouped_actors:
for component_actor in self.grouped_actors[component]:
if hasattr(component_actor, 'car'):
continue
car_prop = component_actor.getProp('TAGame.CarComponent_TA:Vehicle')
if not car_prop:
continue
component_actor.car = self.findActor(car_prop[1])
if not component_actor.car:
continue
if not hasattr(component_actor.car, 'components'):
component_actor.car.components = []
if 'TAGame.Car_TA' in self.grouped_actors:
# link car -> pri
for car_actor in self.grouped_actors['TAGame.Car_TA']:
if hasattr(car_actor, 'pri'):
continue
pri_prop = car_actor.getProp('Engine.Pawn:PlayerReplicationInfo')
if not pri_prop:
continue
car_actor.pri = self.findActor(pri_prop[1])
# create / update players
for car_actor in self.grouped_actors['TAGame.Car_TA']:
player_id = car_actor.getPlayerId()
if not player_id:
continue
if player_id not in self.players:
self.players[player_id] = Player(player_id)
self.players[player_id].update(car_actor)
def findActor(self, find_actor_id):
"""
Attempts to find and return an actor with the given `find_actor_id`. Returns
None when the actor cannot be found.
"""
find_actor_id = int(find_actor_id)
for actor_id, actor in self.actors.items():
if int(actor_id) == find_actor_id:
return actor
return None
def checkCollisions(self):
"""
Determine when and where each collision happened during this game. Save
the collision data in `self.players`.
"""
if 'TAGame.Car_TA' not in self.grouped_actors:
# no need to check collisions when no cars exist
return
# each frame, we only want to check actors that are within Collisions.FRAME_CHECK_RADIUS
# units of each other
# each frame, we only want to tween if anyone is within Collisions.FRAME_CHECK_RADIUS
# units of each other
# create tuples of actors that we want to check this frame
pairs = []
ball = self.ball_actor
for car in self.grouped_actors['TAGame.Car_TA']:
# we dont want to check cars that arent linked with players yet
player_id = car.getPlayerId()
if not player_id:
continue
player = self.players[player_id]
# check if the last collision with the ball was within a certain number of frames
# if it is, we should skip this pair
last_collision = player.getLastCollisionWithActor(ball.id)
if last_collision and last_collision.frame_number > self.frame['Number'] - Collision.MIN_FRAMES_BETWEEN:
continue
# skip if the distance is over the limit
dist = self.distance(ball, car)
if not dist:
continue
if dist > Collision.FRAME_CHECK_RADIUS:
continue
pairs.append((ball, car))
if len(pairs) <= 0:
# only tween if any pairs need to be checked
return
self.tween = 0
# save which actors have collided
collided = []
for i in range(self.NUM_TWEENS):
for actor1, actor2 in pairs:
# combine actor ids into a key for faster lookup
key = actor1.id + actor2.id * 1024
if key in collided:
# dont allow multiple collisions between the same actors per frame
continue
# determine the check radius
check_radius = Collision.CAR_AND_BALL_RADIUS
if actor1.isClass('TAGame.Car_TA'):
if actor2.isClass('TAGame.Car_TA'):
check_radius = Collision.CAR_AND_CAR_RADIUS
else:
check_radius = Collision.CAR_AND_BALL_RADIUS
collision = self.collides(actor1, actor2, check_radius)
if collision:
self.handleCollision(actor1, actor2, collision)
collided.append(key)
self.tween += 1
self.tween = 0
    def handleCollision(self, actor1, actor2, collision):
        """
        Handles a single collision between two actors.

        The collision is recorded on the player owning each car actor
        involved (one record per car, so a car-vs-car collision is stored
        on both players).
        """
        # record on the first actor's player, if it is a car with a player
        if (actor1.isClass('TAGame.Car_TA')):
            player_id = actor1.getPlayerId()
            if player_id:
                self.players[player_id].addCollision(collision)
        # record on the second actor's player, if it is a car with a player
        if (actor2.isClass('TAGame.Car_TA')):
            player_id = actor2.getPlayerId()
            if player_id:
                self.players[player_id].addCollision(collision)
        # debug trace: clock time, frame, tween index, actors, and contact point
        print("*** Collision! ***", self.seconds_remaining, self.frame['Number'], self.tween, "[{0}] x [{1}]".format(actor1.getName(), actor2.getName()), collision.point)
def distance(self, actor1, actor2, return_midpoint=False):
"""
Returns the distance between two actors. Optionally also returns the midpoint
between those two actors.
"""
rval = False
if return_midpoint:
rval = (False, False)
rb1 = actor1.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS)
rb2 = actor2.getRB(self.frame['Number'], self.tween, self.NUM_TWEENS)
if not rb1 or not rb2:
return rval
p1 = numpy.array(rb1['Position'])
p2 = numpy.array(rb2['Position'])
dist = numpy.linalg.norm(p1 - p2)
if return_midpoint:
return (dist, numpy.median([p1, p2], axis=0))
return dist
def collides(self, actor1, actor2, check_radius):
"""
Returns a Collision if the two actors intersect. Otherwise returns False.
"""
(dist, midpoint) = self.distance(actor1, actor2, True)
if not dist:
return False
if dist > check_radius + Collision.TOLERANCE:
return False
shape1 = actor1.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS)
shape2 = actor2.getShape(self.frame['Number'], self.tween, self.NUM_TWEENS)
if not shape1 or not shape2:
return False
if shape1.intersects(shape2, Collision.TOLERANCE):
return Collision(midpoint, self.frame['Number'], actor1.id, actor2.id)
return False
|
rustyfausak/merc
|
merc/old/game.py
|
Python
|
mit
| 7,938
|
"""Train ILSVRC2017 Data using homemade scripts."""
import cv2
import os
import math
import tensorflow as tf
from multiprocessing import Process, Queue
import os
import sys
FILE_DIR = os.path.dirname(__file__)
sys.path.append(FILE_DIR + '/../')
import config as cfg
from img_dataset.ilsvrc2017_cls_multithread import ilsvrc_cls
from yolo2_nets.darknet import darknet19
from yolo2_nets.net_utils import get_ordered_ckpts
from utils.timer import Timer
slim = tf.contrib.slim
def get_validation_process(imdb, queue_in, queue_out):
    """Get validation dataset. Run in a child process.

    Blocks until the parent posts a token on `queue_in`, then fetches one
    validation batch from `imdb` and publishes it on `queue_out`. Loops
    forever; the parent terminates this process explicitly on shutdown.
    """
    while True:
        queue_in.get()
        images, labels = imdb.get()
        queue_out.put([images, labels])
# training dataset (with augmentation) and a smaller-batch validation set
imdb = ilsvrc_cls('train', data_aug=True, multithread=cfg.MULTITHREAD)
val_imdb = ilsvrc_cls('val', batch_size=64)
# set up child process for getting validation data
queue_in = Queue()
queue_out = Queue()
val_data_process = Process(target=get_validation_process,
                           args=(val_imdb, queue_in, queue_out))
val_data_process.start()
queue_in.put(True) # start getting the first batch
# output locations for checkpoints and tensorboard summaries
CKPTS_DIR = cfg.get_ckpts_dir('darknet19', imdb.name)
TENSORBOARD_TRAIN_DIR, TENSORBOARD_VAL_DIR = cfg.get_output_tb_dir(
    'darknet19', imdb.name)
# model inputs: 224x224 RGB images, integer class labels, training flag
input_data = tf.placeholder(tf.float32, [None, 224, 224, 3])
label_data = tf.placeholder(tf.int32, None)
is_training = tf.placeholder(tf.bool)
logits = darknet19(input_data, is_training=is_training)
# mean cross-entropy classification loss
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
    labels=label_data, logits=logits)
loss = tf.reduce_mean(loss)
# run batch-norm moving-average updates together with the train step
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
    # train_op = tf.train.AdamOptimizer(0.0005).minimize(loss)
    train_op = tf.train.MomentumOptimizer(0.001, 0.9).minimize(loss)
# top-1 accuracy over the batch
correct_pred = tf.equal(tf.cast(tf.argmax(logits, 1), tf.int32), label_data)
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
tf.summary.scalar('loss', loss)
tf.summary.scalar('accuracy', accuracy)
######################
# Initialize Session #
######################
# allow_soft_placement lets ops without a GPU kernel fall back to CPU;
# allow_growth avoids grabbing all GPU memory up front
tfconfig = tf.ConfigProto(allow_soft_placement=True)
tfconfig.gpu_options.allow_growth = True
sess = tf.Session(config=tfconfig)
merged = tf.summary.merge_all()
train_writer = tf.summary.FileWriter(TENSORBOARD_TRAIN_DIR)
val_writer = tf.summary.FileWriter(TENSORBOARD_VAL_DIR)
# Restore the latest snapshot; this script always resumes from an existing
# checkpoint. (Dead commented-out fresh-initialization / optimizer-swap
# code was removed.)
ckpts = get_ordered_ckpts(sess, imdb, 'darknet19')
variables_to_restore = slim.get_variables_to_restore()
# BUGFIX: log message typo 'Restorining' -> 'Restoring'
print('Restoring model snapshots from {:s}'.format(ckpts[-1]))
old_saver = tf.train.Saver(variables_to_restore)
old_saver.restore(sess, str(ckpts[-1]))
print('Restored.')
# Checkpoint filenames end in '_epoch_<N>.ckpt'; strip the 5-char '.ckpt'
# suffix to recover N and resume training at epoch N + 1.
fnames = ckpts[-1].split('_')
old_epoch = int(fnames[-1][:-5])
imdb.epoch = old_epoch + 1
# simple model saver used for new snapshots during this run
cur_saver = tf.train.Saver()
T = Timer()
# train for roughly 10 epochs worth of batches
for i in range(imdb.total_batch * 10 + 1):
    T.tic()
    images, labels = imdb.get()
    _, loss_value, acc_value, train_summary = sess.run(
        [train_op, loss, accuracy, merged], {input_data: images, label_data: labels, is_training: 1})
    _time = T.toc(average=False)
    print('epoch {:d}, iter {:d}/{:d}, training loss: {:.3}, training acc: {:.3}, take {:.2}s'
          .format(imdb.epoch, (i + 1) % imdb.total_batch,
                  imdb.total_batch, loss_value, acc_value, _time))
    # every 25 iterations: consume the pre-fetched validation batch,
    # log summaries, and ask the child process for the next batch
    if (i + 1) % 25 == 0:
        T.tic()
        val_images, val_labels = queue_out.get()
        val_loss_value, val_acc_value, val_summary = sess.run(
            [loss, accuracy, merged], {input_data: val_images, label_data: val_labels, is_training: 0})
        _val_time = T.toc(average=False)
        print('###validation loss: {:.3}, validation acc: {:.3}, take {:.2}s'
              .format(val_loss_value, val_acc_value, _val_time))
        queue_in.put(True)
        global_step = imdb.epoch * imdb.total_batch + (i % imdb.total_batch)
        train_writer.add_summary(train_summary, global_step)
        val_writer.add_summary(val_summary, global_step)
    # snapshot the model every two epochs worth of batches
    if (i % (imdb.total_batch * 2) == 0):
        save_path = cur_saver.save(sess, os.path.join(
            CKPTS_DIR,
            cfg.TRAIN_SNAPSHOT_PREFIX + '_epoch_' + str(imdb.epoch - 1) + '.ckpt'))
        print("Model saved in file: %s" % save_path)
# terminate child processes
if cfg.MULTITHREAD:
    imdb.close_all_processes()
# cancel_join_thread lets the parent exit even if the queues still hold
# unconsumed validation batches
queue_in.cancel_join_thread()
queue_out.cancel_join_thread()
val_data_process.terminate()
|
wenxichen/tensorflow_yolo2
|
src/imagenet/imagenet_train_darknet.py
|
Python
|
mit
| 4,911
|
import xml.sax
import xml.sax.handler
import types
# Python 2 compatibility shim: accept both str and unicode where
# available (builds without types.UnicodeType fall back to str only).
# NOTE: this whole module is Python 2 code.
try:
    _StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
    _StringTypes = [types.StringType]
# Event-type tokens produced by DOMEventStream; each event is a
# (token, node) pair.
START_ELEMENT = "START_ELEMENT"
END_ELEMENT = "END_ELEMENT"
COMMENT = "COMMENT"
START_DOCUMENT = "START_DOCUMENT"
END_DOCUMENT = "END_DOCUMENT"
PROCESSING_INSTRUCTION = "PROCESSING_INSTRUCTION"
IGNORABLE_WHITESPACE = "IGNORABLE_WHITESPACE"
CHARACTERS = "CHARACTERS"
class PullDOM(xml.sax.ContentHandler):
    """SAX content handler that converts SAX callbacks into a singly
    linked list of (event, DOM node) pairs.

    Events are appended as two-item cells `[(token, node), next_cell]`;
    `firstEvent`/`lastEvent` point at the head and tail of that list.
    Events received before the document node exists are parked in
    `pending_events` and flushed by buildDocument().
    """
    _locator = None
    document = None
    def __init__(self, documentFactory=None):
        self.documentFactory = documentFactory
        # sentinel head cell of the event linked list
        self.firstEvent = [None, None]
        self.lastEvent = self.firstEvent
        self.elementStack = []
        # bind push/pop to the stack's own methods for speed
        self.push = self.elementStack.append
        try:
            self.pop = self.elementStack.pop
        except AttributeError:
            # use class' pop instead
            pass
        self._ns_contexts = [{}] # contains uri -> prefix dicts
        self._current_context = self._ns_contexts[-1]
        self.pending_events = []
    def pop(self):
        # fallback pop used only if binding the list method failed
        result = self.elementStack[-1]
        del self.elementStack[-1]
        return result
    def setDocumentLocator(self, locator):
        self._locator = locator
    def startPrefixMapping(self, prefix, uri):
        # push a copy so endPrefixMapping can restore the previous context
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix or ''
    def endPrefixMapping(self, prefix):
        self._current_context = self._ns_contexts.pop()
    def startElementNS(self, name, tagName , attrs):
        uri, localname = name
        if uri:
            # When using namespaces, the reader may or may not
            # provide us with the original name. If not, create
            # *a* valid tagName from the current context.
            if tagName is None:
                prefix = self._current_context[uri]
                if prefix:
                    tagName = prefix + ":" + localname
                else:
                    tagName = localname
            if self.document:
                node = self.document.createElementNS(uri, tagName)
            else:
                node = self.buildDocument(uri, tagName)
        else:
            # When the tagname is not prefixed, it just appears as
            # localname
            if self.document:
                node = self.document.createElement(localname)
            else:
                node = self.buildDocument(None, localname)
        # copy attributes onto the new element, qualifying names as needed
        for aname,value in attrs.items():
            a_uri, a_localname = aname
            if a_uri:
                prefix = self._current_context[a_uri]
                if prefix:
                    qname = prefix + ":" + a_localname
                else:
                    qname = a_localname
                attr = self.document.createAttributeNS(a_uri, qname)
                node.setAttributeNodeNS(attr)
            else:
                attr = self.document.createAttribute(a_localname)
                node.setAttributeNode(attr)
            attr.value = value
        # append a START_ELEMENT event and make the node the open element
        self.lastEvent[1] = [(START_ELEMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)
    def endElementNS(self, name, tagName):
        self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
        self.lastEvent = self.lastEvent[1]
    def startElement(self, name, attrs):
        # non-namespace variant of startElementNS
        if self.document:
            node = self.document.createElement(name)
        else:
            node = self.buildDocument(None, name)
        for aname,value in attrs.items():
            attr = self.document.createAttribute(aname)
            attr.value = value
            node.setAttributeNode(attr)
        self.lastEvent[1] = [(START_ELEMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)
    def endElement(self, name):
        self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
        self.lastEvent = self.lastEvent[1]
    def comment(self, s):
        if self.document:
            node = self.document.createComment(s)
            self.lastEvent[1] = [(COMMENT, node), None]
            self.lastEvent = self.lastEvent[1]
        else:
            # no document yet -- park the raw string until buildDocument()
            event = [(COMMENT, s), None]
            self.pending_events.append(event)
    def processingInstruction(self, target, data):
        if self.document:
            node = self.document.createProcessingInstruction(target, data)
            self.lastEvent[1] = [(PROCESSING_INSTRUCTION, node), None]
            self.lastEvent = self.lastEvent[1]
        else:
            # no document yet -- park the raw (target, data) pair
            event = [(PROCESSING_INSTRUCTION, target, data), None]
            self.pending_events.append(event)
    def ignorableWhitespace(self, chars):
        node = self.document.createTextNode(chars)
        self.lastEvent[1] = [(IGNORABLE_WHITESPACE, node), None]
        self.lastEvent = self.lastEvent[1]
    def characters(self, chars):
        node = self.document.createTextNode(chars)
        self.lastEvent[1] = [(CHARACTERS, node), None]
        self.lastEvent = self.lastEvent[1]
    def startDocument(self):
        if self.documentFactory is None:
            import xml.dom.minidom
            self.documentFactory = xml.dom.minidom.Document.implementation
    def buildDocument(self, uri, tagname):
        # Can't do that in startDocument, since we need the tagname
        # XXX: obtain DocumentType
        node = self.documentFactory.createDocument(uri, tagname, None)
        self.document = node
        self.lastEvent[1] = [(START_DOCUMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)
        # Put everything we have seen so far into the document
        for e in self.pending_events:
            if e[0][0] == PROCESSING_INSTRUCTION:
                _,target,data = e[0]
                n = self.document.createProcessingInstruction(target, data)
                e[0] = (PROCESSING_INSTRUCTION, n)
            elif e[0][0] == COMMENT:
                n = self.document.createComment(e[0][1])
                e[0] = (COMMENT, n)
            else:
                raise AssertionError("Unknown pending event ",e[0][0])
            self.lastEvent[1] = e
            self.lastEvent = e
        self.pending_events = None
        return node.firstChild
    def endDocument(self):
        self.lastEvent[1] = [(END_DOCUMENT, self.document), None]
        self.pop()
    def clear(self):
        "clear(): Explicitly release parsing structures"
        self.document = None
class ErrorHandler:
    """SAX error handler: warnings are printed, errors and fatal errors
    are re-raised. (Python 2 print statement -- this module is py2-only.)
    """
    def warning(self, exception):
        print exception
    def error(self, exception):
        raise exception
    def fatalError(self, exception):
        raise exception
class DOMEventStream:
    """Pull-style iterator over the (event, node) pairs produced by a
    PullDOM handler, feeding the SAX parser from `stream` on demand in
    `bufsize`-byte chunks.
    """
    def __init__(self, stream, parser, bufsize):
        self.stream = stream
        self.parser = parser
        self.bufsize = bufsize
        self.reset()
    def reset(self):
        self.pulldom = PullDOM()
        # This content handler relies on namespace support
        self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
        self.parser.setContentHandler(self.pulldom)
    def __getitem__(self, pos):
        # old-style iteration protocol: raise IndexError at end of stream
        rc = self.getEvent()
        if rc:
            return rc
        raise IndexError
    def expandNode(self, node):
        # Consume events until `node`'s END_ELEMENT, attaching every
        # intervening node so `node` ends up fully populated.
        event = self.getEvent()
        parents = [node]
        while event:
            token, cur_node = event
            if cur_node is node:
                return
            if token != END_ELEMENT:
                parents[-1].appendChild(cur_node)
            if token == START_ELEMENT:
                parents.append(cur_node)
            elif token == END_ELEMENT:
                del parents[-1]
            event = self.getEvent()
    def getEvent(self):
        # reset the tail pointer once the queue has been fully drained
        if not self.pulldom.firstEvent[1]:
            self.pulldom.lastEvent = self.pulldom.firstEvent
        # feed the parser until at least one event is queued (or EOF)
        while not self.pulldom.firstEvent[1]:
            buf = self.stream.read(self.bufsize)
            if not buf:
                self.parser.close()
                return None
            self.parser.feed(buf)
        # pop the head event off the linked list
        rc = self.pulldom.firstEvent[1][0]
        self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
        return rc
    def clear(self):
        "clear(): Explicitly release parsing objects"
        self.pulldom.clear()
        del self.pulldom
        self.parser = None
        self.stream = None
class SAX2DOM(PullDOM):
    """PullDOM variant that also links every generated node into its
    parent, so a complete DOM tree is built alongside the event stream.
    """
    def _attach_last_node(self):
        # Attach the most recently generated node to the element that is
        # currently open on the stack.
        node = self.lastEvent[0][1]
        self.elementStack[-1].appendChild(node)
    def startElementNS(self, name, tagName, attrs):
        PullDOM.startElementNS(self, name, tagName, attrs)
        # the new element is on top of the stack; its parent is below it
        child = self.elementStack[-1]
        self.elementStack[-2].appendChild(child)
    def startElement(self, name, attrs):
        PullDOM.startElement(self, name, attrs)
        child = self.elementStack[-1]
        self.elementStack[-2].appendChild(child)
    def processingInstruction(self, target, data):
        PullDOM.processingInstruction(self, target, data)
        self._attach_last_node()
    def ignorableWhitespace(self, chars):
        PullDOM.ignorableWhitespace(self, chars)
        self._attach_last_node()
    def characters(self, chars):
        PullDOM.characters(self, chars)
        self._attach_last_node()
# default read-chunk size for the underlying stream
default_bufsize = (2 ** 14) - 20
def parse(stream_or_string, parser=None, bufsize=None):
    """Return a DOMEventStream over `stream_or_string`.

    Accepts either an open file-like object or a file name; file names
    are opened here (and stay open for the stream's lifetime). A default
    SAX parser is created when none is supplied.
    """
    if bufsize is None:
        bufsize = default_bufsize
    stream = stream_or_string
    if type(stream_or_string) in _StringTypes:
        stream = open(stream_or_string)
    if not parser:
        parser = xml.sax.make_parser()
    return DOMEventStream(stream, parser, bufsize)
def parseString(string, parser=None):
    """Return a DOMEventStream over an in-memory XML string.

    NOTE: Python 2 code -- falls back from cStringIO to StringIO; neither
    module exists on Python 3.
    """
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    # the whole string fits in one read
    bufsize = len(string)
    buf = StringIO(string)
    if not parser:
        parser = xml.sax.make_parser()
    return DOMEventStream(buf, parser, bufsize)
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-2.1/Lib/xml/dom/pulldom.py
|
Python
|
mit
| 10,143
|
from django.conf import settings
# Path template for award badge images ('{slug}' is filled per award);
# projects can override it via the AWARDS_IMAGE_URL Django setting.
IMAGE_URL = getattr(settings, 'AWARDS_IMAGE_URL', 'icons/awards/{slug}.png')
|
fgmacedo/django-awards
|
awards/settings.py
|
Python
|
mit
| 112
|
# Copyright (c) 2015 Daniel Garcia
#
# See the file LICENSE.txt for copying permission.
import unittest
import random
from gameboard.gameboard import Gameboard, Direction
from gameboard.coordinate import Coordinate
class TestBoard(unittest.TestCase):
    """Unit tests for Gameboard: indexing, the eight neighbor helpers,
    direction-based traversal, and square content manipulation.
    Each test gets a fresh 8x8 board from setUp().
    """
    def setUp(self):
        self.board = Gameboard()
    def test_64_squares(self):
        self.assertEqual(len(self.board._squares),64)
    # --- coordinate -> (row, col) index mapping ---
    def test_index_raises_TypeError(self):
        self.assertRaises(TypeError,self.board._indexOf,"notCoordinate")
    def test_index_of_coordinate_corner(self):
        index = self.board._indexOf(Coordinate.a1)
        self.assertEqual(index,(7,0))
    def test_index_of_coordinate_top(self):
        index = self.board._indexOf(Coordinate.c8)
        self.assertEqual(index,(0,2))
    def test_index_of_coordinate_right(self):
        index = self.board._indexOf(Coordinate.h7)
        self.assertEqual(index,(1,7))
    def test_index_of_coordinate_bottom(self):
        index = self.board._indexOf(Coordinate.d1)
        self.assertEqual(index,(7,3))
    def test_index_of_coordinate_left(self):
        index = self.board._indexOf(Coordinate.a5)
        self.assertEqual(index,(3,0))
    def test_index_of_coordinate_center(self):
        index = self.board._indexOf(Coordinate.d3)
        self.assertEqual(index,(5,3))
    # --- single-direction neighbor helpers (None when off the board) ---
    def test_neighbor_top_with_top_square(self):
        n = self.board._neighbor_top(Coordinate.a8)
        self.assertEqual(n, None)
    def test_neighbor_top(self):
        n = self.board._neighbor_top(Coordinate.d3)
        self.assertEqual(n, Coordinate.d4)
    def test_neighbor_top_right_with_top_square(self):
        n = self.board._neighbor_top_right(Coordinate.b8)
        self.assertEqual(n, None)
    def test_neighbor_top_right_with_right_square(self):
        n = self.board._neighbor_top_right(Coordinate.h5)
        self.assertEqual(n, None)
    def test_neighbor_top_right(self):
        n = self.board._neighbor_top_right(Coordinate.f3)
        self.assertEqual(n, Coordinate.g4)
    def test_neighbor_right_with_right_square(self):
        n = self.board._neighbor_right(Coordinate.h5)
        self.assertEqual(n, None)
    def test_neighbor_right(self):
        n = self.board._neighbor_right(Coordinate.d3)
        self.assertEqual(n, Coordinate.e3)
    def test_neighbor_btm_right_with_btm_square(self):
        n = self.board._neighbor_btm_right(Coordinate.b1)
        self.assertEqual(n, None)
    def test_neighbor_btm_right_with_right_square(self):
        n = self.board._neighbor_btm_right(Coordinate.h5)
        self.assertEqual(n, None)
    def test_neighbor_btm_right(self):
        n = self.board._neighbor_btm_right(Coordinate.f3)
        self.assertEqual(n, Coordinate.g2)
    def test_neighbor_btm_with_btm_square(self):
        n = self.board._neighbor_btm(Coordinate.a1)
        self.assertEqual(n, None)
    def test_neighbor_btm(self):
        n = self.board._neighbor_btm(Coordinate.d3)
        self.assertEqual(n, Coordinate.d2)
    def test_neighbor_btm_left_with_btm_square(self):
        n = self.board._neighbor_btm_left(Coordinate.b1)
        self.assertEqual(n, None)
    def test_neighbor_btm_left_with_left_square(self):
        n = self.board._neighbor_btm_left(Coordinate.a6)
        self.assertEqual(n, None)
    def test_neighbor_btm_left(self):
        n = self.board._neighbor_btm_left(Coordinate.f3)
        self.assertEqual(n, Coordinate.e2)
    def test_neighbor_left_with_left_square(self):
        n = self.board._neighbor_left(Coordinate.a5)
        self.assertEqual(n, None)
    def test_neighbor_left(self):
        n = self.board._neighbor_left(Coordinate.d3)
        self.assertEqual(n, Coordinate.c3)
    def test_neighbor_top_left_with_top_square(self):
        n = self.board._neighbor_top_left(Coordinate.b8)
        self.assertEqual(n, None)
    def test_neighbor_top_left_with_left_square(self):
        n = self.board._neighbor_top_left(Coordinate.a5)
        self.assertEqual(n, None)
    def test_neighbor_top_left(self):
        n = self.board._neighbor_top_left(Coordinate.f3)
        self.assertEqual(n, Coordinate.e4)
    # --- public neighbor_in_direction API ---
    def test_neighbor_in_direction_raises_TypeError(self):
        self.assertRaises(TypeError, self.board.neighbor_in_direction,
                          square = "notCoordinate",
                          direction = Direction.top)
        self.assertRaises(TypeError, self.board.neighbor_in_direction,
                          square = Coordinate.a1,
                          direction = "notDirection")
    def test_neighbor_in_direction_top(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.top)
        self.assertEqual(n, Coordinate.d4)
    def test_neighbor_in_direction_top_right(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.top_right)
        self.assertEqual(n, Coordinate.e4)
    def test_neighbor_in_direction_right(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.right)
        self.assertEqual(n, Coordinate.e3)
    def test_neighbor_in_direction_btm_right(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.btm_right)
        self.assertEqual(n, Coordinate.e2)
    def test_neighbor_in_direction_btm(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.btm)
        self.assertEqual(n, Coordinate.d2)
    # NOTE(review): 'directino' typo in the test name below; left unrenamed
    # because this edit only adds documentation.
    def test_neighbor_in_directino_btm_left(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.btm_left)
        self.assertEqual(n, Coordinate.c2)
    def test_neighbor_in_direction_left(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.left)
        self.assertEqual(n, Coordinate.c3)
    def test_neighbor_in_direction_top_left(self):
        n = self.board.neighbor_in_direction(Coordinate.d3, Direction.top_left)
        self.assertEqual(n, Coordinate.c4)
    # --- neighbors(): all eight directions at once ---
    def test_neighbors_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.neighbors,"notCoordinate")
    def test_neighbors_corner(self):
        n = self.board.neighbors(Coordinate.a1)
        correct = {Direction.top: Coordinate.a2,
                   Direction.top_right: Coordinate.b2,
                   Direction.right: Coordinate.b1,
                   Direction.btm_right: None,
                   Direction.btm: None,
                   Direction.btm_left: None,
                   Direction.left: None,
                   Direction.top_left: None}
        self.assertEqual(n, correct)
    def test_neighbors_top(self):
        n = self.board.neighbors(Coordinate.c8)
        correct = {Direction.top: None,
                   Direction.top_right: None,
                   Direction.right: Coordinate.d8,
                   Direction.btm_right: Coordinate.d7,
                   Direction.btm: Coordinate.c7,
                   Direction.btm_left: Coordinate.b7,
                   Direction.left: Coordinate.b8,
                   Direction.top_left: None}
        self.assertEqual(n, correct)
    def test_neighbors_right(self):
        n = self.board.neighbors(Coordinate.h7)
        correct = {Direction.top: Coordinate.h8,
                   Direction.top_right: None,
                   Direction.right: None,
                   Direction.btm_right: None,
                   Direction.btm: Coordinate.h6,
                   Direction.btm_left: Coordinate.g6,
                   Direction.left: Coordinate.g7,
                   Direction.top_left: Coordinate.g8}
        self.assertEqual(n, correct)
    def test_neighbors_bottom(self):
        n = self.board.neighbors(Coordinate.d1)
        correct = {Direction.top: Coordinate.d2,
                   Direction.top_right: Coordinate.e2,
                   Direction.right: Coordinate.e1,
                   Direction.btm_right: None,
                   Direction.btm: None,
                   Direction.btm_left: None,
                   Direction.left: Coordinate.c1,
                   Direction.top_left: Coordinate.c2}
        self.assertEqual(n, correct)
    def test_neighbors_left(self):
        n = self.board.neighbors(Coordinate.a5)
        correct = {Direction.top: Coordinate.a6,
                   Direction.top_right: Coordinate.b6,
                   Direction.right: Coordinate.b5,
                   Direction.btm_right: Coordinate.b4,
                   Direction.btm: Coordinate.a4,
                   Direction.btm_left: None,
                   Direction.left: None,
                   Direction.top_left: None}
        self.assertEqual(n, correct)
    def test_neighbors_center(self):
        n = self.board.neighbors(Coordinate.d3)
        correct = {Direction.top: Coordinate.d4,
                   Direction.top_right: Coordinate.e4,
                   Direction.right: Coordinate.e3,
                   Direction.btm_right: Coordinate.e2,
                   Direction.btm: Coordinate.d2,
                   Direction.btm_left: Coordinate.c2,
                   Direction.left: Coordinate.c3,
                   Direction.top_left: Coordinate.c4}
        self.assertEqual(n, correct)
    # --- squares_in_direction(): ray until edge / occupied square ---
    def test_squares_in_direction_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.squares_in_direction,
                          origin = "notCoordinate",
                          direction = Direction.top)
        self.assertRaises(TypeError, self.board.squares_in_direction,
                          origin = Coordinate.a1,
                          direction = "notDirection")
    def test_squares_in_direction_center_to_edge(self):
        s = self.board.squares_in_direction(Coordinate.d4, Direction.top)
        correct = [Coordinate.d5,
                   Coordinate.d6,
                   Coordinate.d7,
                   Coordinate.d8]
        self.assertEqual(s, correct)
    def test_squares_in_direction_along_edge(self):
        s = self.board.squares_in_direction(Coordinate.a5, Direction.btm)
        correct = [Coordinate.a4,
                   Coordinate.a3,
                   Coordinate.a2,
                   Coordinate.a1]
        self.assertEqual(s, correct)
    def test_squares_in_direction_edge_to_outside(self):
        s = self.board.squares_in_direction(Coordinate.a1, Direction.left)
        self.assertEqual(s, [])
    def test_squares_in_direction_with_non_empty_square_ignored(self):
        self.board.set_content(Coordinate.g5, "boo")
        s = self.board.squares_in_direction(Coordinate.a5, Direction.right)
        correct = [Coordinate.b5,
                   Coordinate.c5,
                   Coordinate.d5,
                   Coordinate.e5,
                   Coordinate.f5]
        self.assertEqual(s, correct)
    def test_squares_in_direction_with_non_empty_square_included(self):
        self.board.set_content(Coordinate.g5, "boo")
        s = self.board.squares_in_direction(Coordinate.d2, Direction.top_right,
                                            include_last_non_empty_square = True)
        correct = [Coordinate.e3,
                   Coordinate.f4,
                   Coordinate.g5]
        self.assertEqual(s, correct)
    # --- path_in_direction(): squares strictly between two coordinates ---
    def test_path_in_direction_raises_TypeError(self):
        self.assertRaises(TypeError, self.board.path_in_direction,
                          origin = "notCoordinate",
                          destination = Coordinate.a3,
                          direction = Direction.top)
        self.assertRaises(TypeError, self.board.path_in_direction,
                          origin = Coordinate.a1,
                          destination = "notCoordinate",
                          direction = Direction.btm)
        self.assertRaises(TypeError, self.board.path_in_direction,
                          origin = Coordinate.a1,
                          destination = Coordinate.b4,
                          direction = "notDirection")
    def test_path_in_direction_top(self):
        path = self.board.path_in_direction(Coordinate.d3, Coordinate.d7, \
                                            Direction.top)
        correctPath = [Coordinate.d4,
                       Coordinate.d5,
                       Coordinate.d6]
        self.assertEqual(path, correctPath)
    def test_path_in_direction_top_right(self):
        path = self.board.path_in_direction(Coordinate.a1, Coordinate.h8, \
                                            Direction.top_right)
        correctPath = [Coordinate.b2,
                       Coordinate.c3,
                       Coordinate.d4,
                       Coordinate.e5,
                       Coordinate.f6,
                       Coordinate.g7]
        self.assertEqual(path, correctPath)
    def test_path_in_direction_unreachable(self):
        path = self.board.path_in_direction(Coordinate.a8, Coordinate.h8, \
                                            Direction.btm)
        correctPath = []
        self.assertEqual(path, correctPath)
    def test_path_in_direction_out_of_bounds(self):
        path = self.board.path_in_direction(Coordinate.a8, Coordinate.a1, \
                                            Direction.left)
        correctPath = []
        self.assertEqual(path, correctPath)
    # --- square content get/set/clear/move ---
    def test_square_content_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.get_content,"notCoordinate")
    def test_square_content(self):
        self.board.set_content(Coordinate.a3, "white piece")
        self.assertEqual(self.board.get_content(Coordinate.a3), "white piece")
    def test_is_square_emtpy_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.is_empty,"notCoordinate")
    def test_is_square_empty_true(self):
        self.assertTrue(self.board.is_empty(Coordinate.d2))
    def test_is_square_empty_false(self):
        self.board.set_content(Coordinate.e3, 2)
        self.assertFalse(self.board.is_empty(Coordinate.e3))
    def test_clear_square_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.clear_square,"notCoordinate")
    def test_clear_square(self):
        self.board.set_content(Coordinate.h4, [1,2])
        self.board.clear_square(Coordinate.h4)
        self.assertTrue(self.board.is_empty(Coordinate.h4))
    def test_clear_board(self):
        # scatter content on 10 random squares, then verify clear_board
        # empties every square on the board
        for i in range(10):
            square = Coordinate(random.randrange(1,64))
            self.board.set_content(square,"pawn")
        self.board.clear_board()
        for square in Coordinate:
            self.assertTrue(self.board.is_empty(square))
    def test_move_content_raises_TypeError(self):
        self.assertRaises(TypeError,self.board.move,
                          origin = Coordinate.a3,
                          destination = "notCoordinate")
        self.assertRaises(TypeError,self.board.move,
                          origin = "notCoordinate",
                          destination = Coordinate.a3)
    def test_move_content(self):
        self.board.set_content(Coordinate.g8, "knight")
        self.board.move(Coordinate.g8, Coordinate.f6)
        self.assertEqual(self.board.get_content(Coordinate.f6),"knight")
# Allow running this test module directly: `python unit_tests.py`
if __name__ == '__main__':
    unittest.main()
|
gamda/gameboard
|
gameboard/tests/unit_tests.py
|
Python
|
mit
| 14,863
|
#!/usr/bin/env python
import sip
sip.setapi('QVariant', 2)
import math
from PyQt4 import QtCore, QtGui, QtNetwork
from lib.Point import Point
from lib.tileOperations import *
TDIM = 256  # tile edge length in pixels (standard slippy-map tile size)
class LightMaps(QtGui.QWidget):
    """Map-viewing widget: renders a SlippyMap and translates mouse,
    wheel, and keyboard input into pan/zoom operations on it.
    """
    def __init__(self, parent = None):
        super(LightMaps, self).__init__(parent)
        # drag state: pressed = left button down, snapped = still within
        # the initial click threshold (not yet treated as a drag)
        self.pressed = False
        self.snapped = False
        self._map = SlippyMap(self)
        self.pressPos = QtCore.QPoint()
        self.dragPos = QtCore.QPoint()
        self._map.updated.connect(self.updateMap)
    def setCenter(self, lat, lng):
        # recenter the map on the given latitude/longitude
        self._map.latitude = lat
        self._map.longitude = lng
        self._map.invalidate()
    def updateMap(self, r):
        # repaint only the rectangle the map reports as dirty
        self.update(r)
    def resizeEvent(self, event):
        # keep the map's logical size in sync with the widget
        self._map.width = self.width()
        self._map.height = self.height()
        self._map.invalidate()
    def paintEvent(self, event):
        p = QtGui.QPainter()
        p.begin(self)
        self._map.render(p, event.rect())
        p.setPen(QtCore.Qt.black)
        p.end()
    def mousePressEvent(self, event):
        if event.buttons() != QtCore.Qt.LeftButton:
            return
        self.pressed = self.snapped = True
        self.pressPos = self.dragPos = event.pos()
    def mouseMoveEvent(self, event):
        if not event.buttons():
            return
        if not self.pressed or not self.snapped:
            # an established drag: pan the map by the mouse delta
            delta = event.pos() - self.pressPos
            self.pressPos = event.pos()
            self._map.pan(delta)
            return
        else:
            # still snapped: release the snap once movement exceeds the
            # threshold in any direction
            threshold = 10
            delta = event.pos() - self.pressPos
            if self.snapped:
                self.snapped &= delta.x() < threshold
                self.snapped &= delta.y() < threshold
                self.snapped &= delta.x() > -threshold
                self.snapped &= delta.y() > -threshold
            self.dragPos = event.pos()
    def mouseReleaseEvent(self, event):
        self.update()
    def wheelEvent(self, event):
        # normalize the wheel delta to +/-1 zoom step
        delta = event.delta()
        delta = abs(delta)/delta
        self._map.change_zoom(delta)
        self.update();
    def keyPressEvent(self, event):
        # arrow keys pan by 20 px; Z/Select recenters the drag position
        if event.key() == QtCore.Qt.Key_Left:
            self._map.pan(QtCore.QPoint(20, 0))
        if event.key() == QtCore.Qt.Key_Right:
            self._map.pan(QtCore.QPoint(-20, 0))
        if event.key() == QtCore.Qt.Key_Up:
            self._map.pan(QtCore.QPoint(0, 20))
        if event.key() == QtCore.Qt.Key_Down:
            self._map.pan(QtCore.QPoint(0, -20))
        if event.key() == QtCore.Qt.Key_Z or event.key() == QtCore.Qt.Key_Select:
            self.dragPos = QtCore.QPoint(self.width() / 2, self.height() / 2)
class SlippyMap(QtCore.QObject):
    """Tile-based map model: computes which TDIM-pixel tiles cover the
    viewport for the current center/zoom, asks TileDownloader for
    missing tiles, and paints cached tiles on demand.
    (Python 2 print statements -- this file is py2-only.)
    """
    # emitted with the screen rectangle that needs repainting
    updated = QtCore.pyqtSignal(QtCore.QRect)
    def __init__(self, parent=None):
        super(SlippyMap, self).__init__(parent)
        self._offset = QtCore.QPoint()
        self._tilesRect = QtCore.QRect()
        self._tilePixmaps = {} # Point(x, y) to QPixmap mapping
        self._manager = TileDownloader(self) ##QtNetwork.QNetworkAccessManager() #############
        #self._manager.finished.connect(self.handleNetworkData)
        self._url = QtCore.QUrl()
        # public vars
        self.width = 400
        self.height = 300
        self.zoom = 7
        self.latitude = -30
        self.longitude = -51.2
        # placeholder drawn for tiles that have not arrived yet
        self._emptyTile = QtGui.QPixmap(TDIM, TDIM)
        self._emptyTile.fill(QtCore.Qt.lightGray)
        ##############
        ###############
    def invalidate(self):
        # Recompute the visible tile rect from center/zoom and kick off
        # downloads for any missing tiles.
        if self.width <= 0 or self.height <= 0:
            return
        print self.latitude, self.longitude, self.zoom
        tx, ty = tileIndexForCoordinate(self.latitude, self.longitude, self.zoom)
        # tx = ct.x()
        # ty = ct.y()
        # top-left corner of the center tile
        xp = int(self.width / 2 - (tx - math.floor(tx)) * TDIM)
        yp = int(self.height / 2 - (ty - math.floor(ty)) * TDIM)
        # first tile vertical and horizontal
        xa = (xp + TDIM - 1) / TDIM
        ya = (yp + TDIM - 1) / TDIM
        xs = int(tx) - xa
        ys = int(ty) - ya
        # offset for top-left tile
        self._offset = QtCore.QPoint(xp - xa * TDIM, yp - ya * TDIM)
        # last tile vertical and horizontal
        xe = int(tx) + (self.width - xp - 1) / TDIM
        ye = int(ty) + (self.height - yp - 1) / TDIM
        # build a rect
        self._tilesRect = QtCore.QRect(xs, ys, xe - xs + 1, ye - ys + 1)
        if self._url.isEmpty():
            self._manager.download()
        self.updated.emit(QtCore.QRect(0, 0, self.width, self.height))
    def render(self, painter, rect):
        # paint every visible tile, falling back to the gray placeholder
        for x in range(self._tilesRect.width()):
            for y in range(self._tilesRect.height()):
                print x, y
                tp = Point(x + self._tilesRect.left(), y + self._tilesRect.top())
                box = QtCore.QRect(self._manager.tileRect(tp))
                if rect.intersects(box):
                    print "Box", box
                    painter.drawPixmap(box, self._tilePixmaps.get(tp, self._emptyTile))
    def pan(self, delta):
        # shift the center by the pixel delta converted to tile units
        dx = QtCore.QPointF(delta) / float(TDIM)
        cx, cy = tileIndexForCoordinate(self.latitude, self.longitude, self.zoom)
        center = QtCore.QPointF(cx, cy) - dx
        self.latitude = latitudeFromTileY(center.y(), self.zoom)
        self.longitude = longitudeFromTileX(center.x(), self.zoom)
        self.invalidate()
    def change_zoom(self, val):
        # clamp zoom to the valid slippy-map range [1, 22]
        self.zoom = max(1, min(22, self.zoom + val))
        print "ZOOM", self.zoom
        self.invalidate();
############################
class TileDownloader(QtNetwork.QNetworkAccessManager):
    """Fetches map tiles for its owning SlippyMap, one request at a time."""
    updated = QtCore.pyqtSignal(QtCore.QRect)
    def __init__(self, parent=None):
        super(TileDownloader, self).__init__()
        # `parent` is the owning SlippyMap; its tile cache is mutated here.
        self.parent = parent
        # On-disk HTTP cache so previously fetched tiles survive restarts.
        cache = QtNetwork.QNetworkDiskCache()
        cache.setCacheDirectory(
            QtGui.QDesktopServices.storageLocation
            (QtGui.QDesktopServices.CacheLocation))
        self.setCache(cache)
        self.finished.connect(self.handleNetworkData)
    # slots
    def handleNetworkData(self, reply):
        """Store a finished tile reply, purge far-off tiles, fetch the next one."""
        img = QtGui.QImage()
        # The tile index was stashed on the request as a user attribute.
        tp = Point(reply.request().attribute(QtNetwork.QNetworkRequest.User))
        # NOTE(review): `url` is assigned but never used.
        url = reply.url()
        if not reply.error():
            if img.load(reply, None):
                self.parent._tilePixmaps[tp] = QtGui.QPixmap.fromImage(img)
        reply.deleteLater()
        self.parent.updated.emit(self.tileRect(tp))
        # purge unused tiles (keep a 2-tile margin around the visible rect)
        bound = self.parent._tilesRect.adjusted(-2, -2, 2, 2)
        for tp in list(self.parent._tilePixmaps.keys()):
            if not bound.contains(tp):
                del self.parent._tilePixmaps[tp]
        self.download()
    def download(self):
        """Request one tile in the visible rect that is not cached yet."""
        grab = None
        for x in range(self.parent._tilesRect.width()):
            for y in range(self.parent._tilesRect.height()):
                tp = Point(self.parent._tilesRect.topLeft() + QtCore.QPoint(x, y))
                if tp not in self.parent._tilePixmaps:
                    grab = QtCore.QPoint(tp)
                    break
        # NOTE(review): `break` only exits the inner loop, so `grab` ends up
        # being the missing tile found in the LAST column scanned, not the
        # first overall — confirm this ordering is intended.
        if grab is None:
            self._url = QtCore.QUrl()
            return
        #path = 'http://tile.openstreetmap.org/%d/%d/%d.png' % (self.zoom, grab.x(), grab.y())
        path = 'https://mts2.google.com/vt?lyrs=y&x={0}&y={1}&z={2}'.format(grab.x(), grab.y(), self.parent.zoom)
        print path
        self._url = QtCore.QUrl(path)
        request = QtNetwork.QNetworkRequest()
        request.setUrl(self._url)
        request.setRawHeader('User-Agent', 'Nokia (PyQt) Graphics Dojo 1.0')
        # Stash the tile index so handleNetworkData can match the reply to it.
        request.setAttribute(QtNetwork.QNetworkRequest.User, grab)
        self.get(request)
    ################################
    def tileRect(self, tp):
        """Return the widget-space rectangle covered by tile index `tp`."""
        t = tp - self.parent._tilesRect.topLeft()
        x = t.x() * TDIM + self.parent._offset.x()
        y = t.y() * TDIM + self.parent._offset.y()
        return QtCore.QRect(x, y, TDIM, TDIM)
if __name__ == '__main__':
    import sys
    class MapZoom(QtGui.QMainWindow):
        """Top-level window hosting a focused LightMaps widget."""
        def __init__(self):
            super(MapZoom, self).__init__(None)
            self.map_ = LightMaps(self)
            self.map_.setFocus()
            self.setCentralWidget(self.map_)
    # The QApplication must exist before any widget is constructed.
    application = QtGui.QApplication(sys.argv)
    application.setApplicationName('LightMaps')
    window = MapZoom()
    window.setWindowTitle("Slippy Map Demo")
    window.resize(600, 450)
    window.show()
    sys.exit(application.exec_())
|
heltonbiker/MapComplete
|
PyQt/SlippyMapOriginal.py
|
Python
|
mit
| 8,532
|
import logging
from wtforms import Form, SelectField, SelectMultipleField, validators
import savant.comparisons
import savant.sets
import savant.diffs
class DiffForm(Form):
    """Multi-select of diffs; at least one selection is required."""
    diffs = SelectMultipleField('Diffs', [validators.Required()], choices=[])
class DiffNamingForm(DiffForm):
    """DiffForm extended with action/system/name selects for naming a set.
    System and name choices start empty and are populated dynamically
    (see DDiffNamingForm below); action is fixed to add/subtract.
    """
    action = SelectField('Action', [validators.Required()], choices=[('add', 'Adds'),('subtract', 'Subtracts')])
    system = SelectField('System', [validators.Required()], choices=[])
    name = SelectField('Named', [validators.Required()], choices=[])
class DDiffBase(object):
    '''Shared methods for DDiff Forms.

    Subclasses must set ``self.logger`` and ``self.db`` before calling
    :meth:`get_diff_choices`.
    '''
    def get_diff_choices(self, diff_ids, exclude_set_ids=None):
        '''Build grouped (value, label) choices for a diff select field.

        Diffs are grouped under inert header rows (values prefixed with
        'I') first by system, then by action; each selectable entry shows
        the diff name plus the number of sets containing it.

        diff_ids -- iterable of diff ids to offer
        exclude_set_ids -- diffs appearing in any of these sets are
            omitted (default: exclude nothing; the previous ``=[]``
            default was the shared-mutable-default pitfall)
        '''
        if exclude_set_ids is None:
            exclude_set_ids = []
        diff_choices = []
        prev_system = ''
        prev_action = ''
        self.logger.debug('excluding set ids: %s', exclude_set_ids)
        # Use a real set for the O(1) disjointness test below.
        exclude_set_ids = set(exclude_set_ids)
        for diff_id in sorted(diff_ids):
            diff = savant.diffs.Diff(diff_id)
            set_ids_with_diff = savant.sets.find_with_diff(diff, self.db)
            # Skip diffs that already appear in one of the excluded sets.
            if not exclude_set_ids.isdisjoint(set_ids_with_diff):
                continue
            set_id_count = str(len(set_ids_with_diff))
            # Emit a header row whenever the system changes...
            if diff.system != prev_system:
                diff_choices.append(('Isystem', diff.system))
                prev_system = diff.system
                prev_action = ''
            # ...and another when the action changes within a system.
            if diff.action != prev_action:
                diff_choices.append(('I'+diff.action, '- '+diff.action))
                prev_action = diff.action
            diff_choices.append(
                (diff.id, '--- '+diff.name+' ('+set_id_count+')'))
        return diff_choices
    def get_valid_choices(self, given_choices):
        '''Return ``given_choices`` minus the inert 'I'-prefixed headers.'''
        return [choice for choice in given_choices
                if not choice.startswith('I')]
class DDiffForm(DDiffBase):
    '''This is the dynamic version of the DiffForm.

    Wraps a DiffForm whose diff choices come from one savant set; on a
    POST request it also records which diffs were selected for deletion.
    '''
    def __init__(self, db, set_id, request=None):
        self.logger = logging.getLogger(__name__ + '.' + type(self).__name__)
        self.db = db
        self.set_id = set_id
        self.set_obj = savant.sets.Set(self.db, self.set_id)
        self.delete_diffs = []
        if request is not None:
            # Bind submitted data and keep only the selectable entries.
            self.form = DiffForm(request.form)
            self.delete_diffs = self.get_valid_choices(self.form.diffs.data)
        else:
            self.form = DiffForm()
        self.form.diffs.choices = self.get_diff_choices(self.set_obj.get_diff_ids())
class DDiffNamingForm(DDiffBase):
    '''This is the dynamic version of the DiffNamingForm.

    Wraps a DiffNamingForm, populating its select fields from one
    comparison's diffs and, on POST, deriving the target set id from
    the submitted action/system/name.
    '''
    def __init__(self, db, comparison_id, request=None, exclude_set_ids=None):
        '''
        db -- database handle passed through to savant lookups
        comparison_id -- id of the comparison whose diffs are offered
        request -- incoming request carrying form data, or None for a
            blank form
        exclude_set_ids -- set ids whose diffs should not be offered
            (default: exclude nothing; a None default replaces the
            previous shared-mutable ``=[]`` default)
        '''
        self.logger = logging.getLogger(__name__ + '.' + type(self).__name__)
        self.db = db
        self.comparison_id = comparison_id
        self.set_id = None
        self.set_choices = []
        # Normalize here so the base class always receives a real list.
        if exclude_set_ids is None:
            exclude_set_ids = []
        if request is None:
            self.form = DiffNamingForm()
        else:
            self.form = DiffNamingForm(request.form)
            # 'action|system|name' identifies the target set.
            self.set_id = self.form.action.data +'|'+ self.form.system.data +'|'+ self.form.name.data
            self.set_choices = self.get_valid_choices(self.form.diffs.data)
        comparison = savant.comparisons.Comparison(db, id=comparison_id)
        self.form.diffs.choices = self.get_diff_choices(comparison.get_diff_ids(), exclude_set_ids)
        # Name choices are derived from diffs.choices, so order matters.
        self.form.name.choices = self.get_name_choices()
        self.system_choices = [
            (system_name, system_name.capitalize())
            for system_name in sorted(comparison.get_systems())]
        self.form.system.choices = self.system_choices
    def get_name_choices(self):
        '''Get name options based on the diff choice names set in this form.'''
        name_choices = set()
        for choice in self.form.diffs.choices:
            # Skip the inert 'I'-prefixed header rows.
            if choice[0].startswith('I'):
                continue
            diff = savant.diffs.Diff(choice[0])
            name_choices.add((diff.name, diff.name))
        return sorted(name_choices)
|
Doveps/mono
|
savant-web/forms.py
|
Python
|
mit
| 4,267
|
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2015, David P. D. Moss. All rights reserved.
#
# Released under the BSD license. See the LICENSE file for details.
#-----------------------------------------------------------------------------
"""
IEEE 48-bit EUI (MAC address) logic.
Supports numerous MAC string formats including Cisco's triple hextet as well
as bare MACs containing no delimiters.
"""
import struct as _struct
import re as _re
# Check whether we need to use fallback code or not.
try:
from socket import AF_LINK
except ImportError:
AF_LINK = 48
from netaddr.core import AddrFormatError
from netaddr.strategy import \
valid_words as _valid_words, \
int_to_words as _int_to_words, \
words_to_int as _words_to_int, \
valid_bits as _valid_bits, \
bits_to_int as _bits_to_int, \
int_to_bits as _int_to_bits, \
valid_bin as _valid_bin, \
int_to_bin as _int_to_bin, \
bin_to_int as _bin_to_int
from netaddr.compat import _is_str
#: The width (in bits) of this address type.
width = 48
#: The AF_* constant value of this address type (falls back to 48 above
#: when the platform's socket module does not define AF_LINK).
family = AF_LINK
#: A friendly string name address type.
family_name = 'MAC'
#: The version of this address type.
version = 48
#: The maximum integer value that can be represented by this address type.
max_int = 2 ** width - 1
#-----------------------------------------------------------------------------
# Dialect classes.
#-----------------------------------------------------------------------------
class mac_eui48(object):
    """A standard IEEE EUI-48 dialect class.
    Subclasses override these attributes to describe alternative MAC
    string formats (word size, separator and hex formatting).
    """
    #: The individual word size (in bits) of this address type.
    word_size = 8
    #: The number of words in this address type.
    num_words = width // word_size
    #: The maximum integer value for an individual word in this address type.
    max_word = 2 ** word_size - 1
    #: The separator character used between each word.
    word_sep = '-'
    #: The format string to be used when converting words to string values.
    word_fmt = '%.2X'
    #: The number base to be used when interpreting word values as integers.
    word_base = 16
class mac_unix(mac_eui48):
    """A UNIX-style MAC address dialect class."""
    #: Colon-separated bytes, lowercase hex without zero padding.
    word_size = 8
    num_words = width // word_size
    word_sep = ':'
    word_fmt = '%x'
    word_base = 16
class mac_unix_expanded(mac_unix):
    """A UNIX-style MAC address dialect class with leading zeroes."""
    #: Same as mac_unix but zero-padded to two hex digits per word.
    word_fmt = '%.2x'
class mac_cisco(mac_eui48):
    """A Cisco 'triple hextet' MAC address dialect class."""
    #: Three dot-separated, zero-padded 16-bit words.
    word_size = 16
    num_words = width // word_size
    word_sep = '.'
    word_fmt = '%.4x'
    word_base = 16
class mac_bare(mac_eui48):
    """A bare (no delimiters) MAC address dialect class."""
    #: One 48-bit word rendered as 12 uppercase hex digits, no separator.
    word_size = 48
    num_words = width // word_size
    word_sep = ''
    word_fmt = '%.12X'
    word_base = 16
class mac_pgsql(mac_eui48):
    """A PostgreSQL style (2 x 24-bit words) MAC address dialect class."""
    #: Two colon-separated, zero-padded 24-bit words.
    word_size = 24
    num_words = width // word_size
    word_sep = ':'
    word_fmt = '%.6x'
    word_base = 16
#: The default dialect to be used when not specified by the user.
#: (The module-level functions below fall back to this when dialect=None.)
DEFAULT_DIALECT = mac_eui48
#-----------------------------------------------------------------------------
#: Regular expressions to match all supported MAC address formats.
#
# Raw strings are used so regex escapes such as ``\.`` are not mis-read as
# (invalid) string escape sequences, which raises a DeprecationWarning on
# modern Python.
RE_MAC_FORMATS = (
    # 2 hex digits x 6 words (UNIX, Windows, EUI-48)
    '^' + ':'.join([r'([0-9A-F]{1,2})'] * 6) + '$',
    '^' + '-'.join([r'([0-9A-F]{1,2})'] * 6) + '$',
    # 4 hex digits x 3 words (Cisco)
    '^' + ':'.join([r'([0-9A-F]{1,4})'] * 3) + '$',
    '^' + '-'.join([r'([0-9A-F]{1,4})'] * 3) + '$',
    '^' + r'\.'.join([r'([0-9A-F]{1,4})'] * 3) + '$',
    # 6 hex digits x 2 words (PostgreSQL)
    '^' + '-'.join([r'([0-9A-F]{5,6})'] * 2) + '$',
    '^' + ':'.join([r'([0-9A-F]{5,6})'] * 2) + '$',
    # 12 (or 11) hex digits, bare, no delimiters
    '^(' + ''.join([r'[0-9A-F]'] * 12) + ')$',
    '^(' + ''.join([r'[0-9A-F]'] * 11) + ')$',
)
# For efficiency, each string regexp converted in place to its compiled
# counterpart.
RE_MAC_FORMATS = [_re.compile(_, _re.IGNORECASE) for _ in RE_MAC_FORMATS]
def valid_str(addr):
    """
    :param addr: An IEEE EUI-48 (MAC) address in string form.
    :return: ``True`` if MAC address string is valid, ``False`` otherwise.
    """
    try:
        # Any one of the supported formats matching makes the string valid.
        return any(regexp.findall(addr) for regexp in RE_MAC_FORMATS)
    except TypeError:
        # Non-string input is simply not a valid MAC string.
        return False
def str_to_int(addr):
    """
    :param addr: An IEEE EUI-48 (MAC) address in string form.
    :return: An unsigned integer that is equivalent to value represented
        by EUI-48/MAC string address formatted according to the dialect
        settings.
    """
    words = []
    if _is_str(addr):
        found_match = False
        # Try each supported format until one matches; keep its groups.
        for regexp in RE_MAC_FORMATS:
            match_result = regexp.findall(addr)
            if len(match_result) != 0:
                found_match = True
                if isinstance(match_result[0], tuple):
                    words = match_result[0]
                else:
                    # Single-group (bare) patterns yield plain strings.
                    words = (match_result[0],)
                break
        if not found_match:
            raise AddrFormatError('%r is not a supported MAC format!' % addr)
    else:
        raise TypeError('%r is not str() or unicode()!' % addr)
    int_val = None
    # Zero-pad each word back to its full width before joining as hex.
    if len(words) == 6:
        # 2 bytes x 6 (UNIX, Windows, EUI-48)
        int_val = int(''.join(['%.2x' % int(w, 16) for w in words]), 16)
    elif len(words) == 3:
        # 4 bytes x 3 (Cisco)
        int_val = int(''.join(['%.4x' % int(w, 16) for w in words]), 16)
    elif len(words) == 2:
        # 6 bytes x 2 (PostgreSQL)
        int_val = int(''.join(['%.6x' % int(w, 16) for w in words]), 16)
    elif len(words) == 1:
        # 12 bytes (bare, no delimiters)
        int_val = int('%012x' % int(words[0], 16), 16)
    else:
        raise AddrFormatError('unexpected word count in MAC address %r!' \
            % addr)
    return int_val
def int_to_str(int_val, dialect=None):
    """
    :param int_val: An unsigned integer.
    :param dialect: (optional) a Python class defining formatting options.
    :return: An IEEE EUI-48 (MAC) address string that is equivalent to
        unsigned integer formatted according to the dialect settings.
    """
    if dialect is None:
        dialect = mac_eui48
    # Format each word per the dialect and join with its separator.
    return dialect.word_sep.join(
        dialect.word_fmt % word for word in int_to_words(int_val, dialect))
def int_to_packed(int_val):
    """
    :param int_val: the integer to be packed.
    :return: a packed string that is equivalent to value represented by an
        unsigned integer.
    """
    # Split into a 16-bit high word and a 32-bit low word: 6 bytes total,
    # big-endian (network order).
    high_word = int_val >> 32
    low_word = int_val & 0xffffffff
    return _struct.pack(">HI", high_word, low_word)
def packed_to_int(packed_int):
    """
    :param packed_int: a packed string containing an unsigned integer.
        It is assumed that string is packed in network byte order.
    :return: An unsigned integer equivalent to value of network address
        represented by packed binary string.
    """
    # Fold the six big-endian octets into one integer, MSB first.
    int_val = 0
    for octet in _struct.unpack('>6B', packed_int):
        int_val = (int_val << 8) | octet
    return int_val
def valid_words(words, dialect=None):
    """Validate a word sequence against the dialect's word size and count."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _valid_words(words, cfg.word_size, cfg.num_words)
def int_to_words(int_val, dialect=None):
    """Split an unsigned integer into the dialect's word sequence."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _int_to_words(int_val, cfg.word_size, cfg.num_words)
def words_to_int(words, dialect=None):
    """Combine a word sequence into one unsigned integer per the dialect."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _words_to_int(words, cfg.word_size, cfg.num_words)
def valid_bits(bits, dialect=None):
    """Validate a bit string against the 48-bit width and word separator."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _valid_bits(bits, width, cfg.word_sep)
def bits_to_int(bits, dialect=None):
    """Convert a word-separated bit string to an unsigned integer."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _bits_to_int(bits, width, cfg.word_sep)
def int_to_bits(int_val, dialect=None):
    """Render an unsigned integer as a word-separated bit string."""
    cfg = DEFAULT_DIALECT if dialect is None else dialect
    return _int_to_bits(int_val, cfg.word_size, cfg.num_words, cfg.word_sep)
def valid_bin(bin_val, dialect=None):
    """Validate a Python-style binary string for the 48-bit width.
    The ``dialect`` argument is accepted for API symmetry but unused.
    """
    return _valid_bin(bin_val, width)
def int_to_bin(int_val):
    """Convert an unsigned integer to its 48-bit binary string form.
    Delegates to the shared strategy helper using this module's width.
    """
    return _int_to_bin(int_val, width)
def bin_to_int(bin_val):
    """Convert a 48-bit binary string form back to an unsigned integer.
    Delegates to the shared strategy helper using this module's width.
    """
    return _bin_to_int(bin_val, width)
|
DaKnOb/mwhois
|
netaddr/strategy/eui48.py
|
Python
|
mit
| 8,693
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Zhassulan Zhussupov
# Author zhzhussupovkz@gmail.com
# Tree class
import random
class Tree:
    """A roadside tree sprite that scrolls past the taxi."""
    #: Candidate sprite files; one is picked at random on each (re)spawn.
    MODELS = ('tree.png', 'tree_1.png')
    #: Vertical scroll speed (pixels per tick) for each taxi gear.
    GEAR_SPEED = {1: 0.2, 2: 0.4, 3: 0.5, 4: 0.75}
    def __init__(self, world, screen, x, y):
        """
        world -- game world; provides the pygame module and the taxi
        screen -- surface the tree is drawn onto
        x, y -- initial sprite position
        """
        self.world, self.pygame = world, world.pygame
        self.screen = screen
        # Reuse change() for the initial sprite pick instead of
        # duplicating the random-load code here.
        self.change()
        self.x, self.y = x, y
    def draw(self):
        """Blit the sprite at its current position."""
        self.screen.blit(self.image, [self.x, self.y])
    def move(self):
        """Scroll down while the taxi accelerates; respawn past the bottom."""
        if self.y >= 480:  # below the visible 480px playfield
            self.change()
            self.y = 0
        key = self.pygame.key.get_pressed()
        if key[self.pygame.K_UP]:
            # Faster in higher gears; gears outside 1-4 contribute nothing,
            # matching the original if/elif chain.
            self.y += self.GEAR_SPEED.get(self.world.taxi.gear, 0)
    def change(self):
        """Swap in a new randomly chosen sprite image."""
        model = random.choice(self.MODELS)
        self.image = self.pygame.image.load("./images/houses/" + model)
|
zhzhussupovkz/taxi-py
|
core/tree.py
|
Python
|
mit
| 1,125
|
"""Integration with the Rachio Iro sprinkler system controller."""
from abc import abstractmethod
from contextlib import suppress
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.switch import SwitchEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ENTITY_ID, ATTR_ID
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import as_timestamp, now, parse_datetime, utc_from_timestamp
from .const import (
CONF_MANUAL_RUN_MINS,
DEFAULT_MANUAL_RUN_MINS,
DOMAIN as DOMAIN_RACHIO,
KEY_CUSTOM_CROP,
KEY_CUSTOM_SHADE,
KEY_CUSTOM_SLOPE,
KEY_DEVICE_ID,
KEY_DURATION,
KEY_ENABLED,
KEY_ID,
KEY_IMAGE_URL,
KEY_NAME,
KEY_ON,
KEY_RAIN_DELAY,
KEY_RAIN_DELAY_END,
KEY_SCHEDULE_ID,
KEY_SUBTYPE,
KEY_SUMMARY,
KEY_TYPE,
KEY_ZONE_ID,
KEY_ZONE_NUMBER,
SCHEDULE_TYPE_FIXED,
SCHEDULE_TYPE_FLEX,
SERVICE_SET_ZONE_MOISTURE,
SERVICE_START_MULTIPLE_ZONES,
SIGNAL_RACHIO_CONTROLLER_UPDATE,
SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
SIGNAL_RACHIO_SCHEDULE_UPDATE,
SIGNAL_RACHIO_ZONE_UPDATE,
SLOPE_FLAT,
SLOPE_MODERATE,
SLOPE_SLIGHT,
SLOPE_STEEP,
)
from .entity import RachioDevice
from .webhooks import (
SUBTYPE_RAIN_DELAY_OFF,
SUBTYPE_RAIN_DELAY_ON,
SUBTYPE_SCHEDULE_COMPLETED,
SUBTYPE_SCHEDULE_STARTED,
SUBTYPE_SCHEDULE_STOPPED,
SUBTYPE_SLEEP_MODE_OFF,
SUBTYPE_SLEEP_MODE_ON,
SUBTYPE_ZONE_COMPLETED,
SUBTYPE_ZONE_PAUSED,
SUBTYPE_ZONE_STARTED,
SUBTYPE_ZONE_STOPPED,
)
_LOGGER = logging.getLogger(__name__)
# Service-call attribute names.
ATTR_DURATION = "duration"
ATTR_PERCENT = "percent"
# Extra state attribute labels for schedule entities.
ATTR_SCHEDULE_SUMMARY = "Summary"
ATTR_SCHEDULE_ENABLED = "Enabled"
ATTR_SCHEDULE_DURATION = "Duration"
ATTR_SCHEDULE_TYPE = "Type"
ATTR_SORT_ORDER = "sortOrder"
# Extra state attribute labels for zone entities.
ATTR_ZONE_NUMBER = "Zone number"
ATTR_ZONE_SHADE = "Shade"
ATTR_ZONE_SLOPE = "Slope"
ATTR_ZONE_SUMMARY = "Summary"
ATTR_ZONE_TYPE = "Type"
# start_multiple_zones takes one or more zone entities plus a CSV list of
# per-zone durations (minutes).
START_MULTIPLE_ZONES_SCHEMA = vol.Schema(
    {
        vol.Required(ATTR_ENTITY_ID): cv.entity_ids,
        vol.Required(ATTR_DURATION): cv.ensure_list_csv,
    }
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the Rachio switches."""
    zone_entities = []
    has_flex_sched = False
    # Entity creation calls the Rachio API, so run it in the executor.
    entities = await hass.async_add_executor_job(_create_entities, hass, config_entry)
    for entity in entities:
        if isinstance(entity, RachioZone):
            zone_entities.append(entity)
        if isinstance(entity, RachioSchedule) and entity.type == SCHEDULE_TYPE_FLEX:
            has_flex_sched = True
    async_add_entities(entities)
    _LOGGER.info("%d Rachio switch(es) added", len(entities))
    def start_multiple(service: ServiceCall) -> None:
        """Service to start multiple zones in sequence."""
        zones_list = []
        person = hass.data[DOMAIN_RACHIO][config_entry.entry_id]
        entity_id = service.data[ATTR_ENTITY_ID]
        duration = iter(service.data[ATTR_DURATION])
        # When fewer durations than zones are given, reuse the first one.
        default_time = service.data[ATTR_DURATION][0]
        entity_to_zone_id = {
            entity.entity_id: entity.zone_id for entity in zone_entities
        }
        for (count, data) in enumerate(entity_id):
            if data in entity_to_zone_id:
                # Time can be passed as a list per zone,
                # or one time for all zones
                time = int(next(duration, default_time)) * 60  # minutes -> seconds
                zones_list.append(
                    {
                        ATTR_ID: entity_to_zone_id.get(data),
                        ATTR_DURATION: time,
                        ATTR_SORT_ORDER: count,
                    }
                )
        if len(zones_list) != 0:
            person.start_multiple_zones(zones_list)
            _LOGGER.debug("Starting zone(s) %s", entity_id)
        else:
            raise HomeAssistantError("No matching zones found in given entity_ids")
    hass.services.async_register(
        DOMAIN_RACHIO,
        SERVICE_START_MULTIPLE_ZONES,
        start_multiple,
        schema=START_MULTIPLE_ZONES_SCHEMA,
    )
    if has_flex_sched:
        # Moisture control only applies to flex schedules.
        platform = entity_platform.async_get_current_platform()
        platform.async_register_entity_service(
            SERVICE_SET_ZONE_MOISTURE,
            {vol.Required(ATTR_PERCENT): cv.positive_int},
            "set_moisture_percent",
        )
def _create_entities(hass, config_entry):
    """Build all Rachio switch entities for a config entry (blocking I/O)."""
    person = hass.data[DOMAIN_RACHIO][config_entry.entry_id]
    entities = []
    for controller in person.controllers:
        entities.append(RachioStandbySwitch(controller))
        entities.append(RachioRainDelay(controller))
        # Fetch the schedule once per controller up front so that every
        # zone entity doesn't have to fetch it itself.
        zones = controller.list_zones()
        schedules = controller.list_schedules()
        flex_schedules = controller.list_flex_schedules()
        current_schedule = controller.current_schedule
        entities.extend(
            RachioZone(person, controller, zone, current_schedule)
            for zone in zones
        )
        entities.extend(
            RachioSchedule(person, controller, schedule, current_schedule)
            for schedule in schedules + flex_schedules
        )
    _LOGGER.debug("Added %s", entities)
    return entities
class RachioSwitch(RachioDevice, SwitchEntity):
    """Represent a Rachio state that can be toggled."""
    def __init__(self, controller):
        """Initialize a new Rachio switch."""
        super().__init__(controller)
        # None until the first webhook/startup data establishes a state.
        self._state = None
    @property
    def name(self) -> str:
        """Get a name for this switch."""
        return f"Switch on {self._controller.name}"
    @property
    def is_on(self) -> bool:
        """Return whether the switch is currently on."""
        return self._state
    @callback
    def _async_handle_any_update(self, *args, **kwargs) -> None:
        """Determine whether an update event applies to this device."""
        if args[0][KEY_DEVICE_ID] != self._controller.controller_id:
            # For another device
            return
        # For this device
        # Note: the packed args tuple is forwarded as a single positional
        # argument, so handlers reached via this path see the webhook
        # payload nested one level deeper, at args[0][0].
        self._async_handle_update(args, kwargs)
    @abstractmethod
    def _async_handle_update(self, *args, **kwargs) -> None:
        """Handle incoming webhook data."""
class RachioStandbySwitch(RachioSwitch):
    """Representation of a standby status/button."""
    @property
    def name(self) -> str:
        """Return the name of the standby switch."""
        return f"{self._controller.name} in standby mode"
    @property
    def unique_id(self) -> str:
        """Return a unique id by combining controller id and purpose."""
        return f"{self._controller.controller_id}-standby"
    @property
    def icon(self) -> str:
        """Return an icon for the standby switch."""
        return "mdi:power"
    @callback
    def _async_handle_update(self, *args, **kwargs) -> None:
        """Update the state using webhook data."""
        # Payload arrives double-wrapped via RachioSwitch._async_handle_any_update.
        if args[0][0][KEY_SUBTYPE] == SUBTYPE_SLEEP_MODE_ON:
            self._state = True
        elif args[0][0][KEY_SUBTYPE] == SUBTYPE_SLEEP_MODE_OFF:
            self._state = False
        self.async_write_ha_state()
    def turn_on(self, **kwargs) -> None:
        """Put the controller in standby mode."""
        # Standby "on" means the controller itself is switched off.
        self._controller.rachio.device.turn_off(self._controller.controller_id)
    def turn_off(self, **kwargs) -> None:
        """Resume controller functionality."""
        self._controller.rachio.device.turn_on(self._controller.controller_id)
    async def async_added_to_hass(self):
        """Subscribe to updates."""
        if KEY_ON in self._controller.init_data:
            # Standby state is the inverse of the controller's "on" flag.
            self._state = not self._controller.init_data[KEY_ON]
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                SIGNAL_RACHIO_CONTROLLER_UPDATE,
                self._async_handle_any_update,
            )
        )
class RachioRainDelay(RachioSwitch):
    """Representation of a rain delay status/switch."""
    def __init__(self, controller):
        """Set up a Rachio rain delay switch."""
        # Callable that cancels the pending expiry timer, if one is armed.
        self._cancel_update = None
        super().__init__(controller)
    @property
    def name(self) -> str:
        """Return the name of the switch."""
        return f"{self._controller.name} rain delay"
    @property
    def unique_id(self) -> str:
        """Return a unique id by combining controller id and purpose."""
        return f"{self._controller.controller_id}-delay"
    @property
    def icon(self) -> str:
        """Return an icon for rain delay."""
        return "mdi:camera-timer"
    @callback
    def _async_handle_update(self, *args, **kwargs) -> None:
        """Update the state using webhook data."""
        # A fresh webhook event supersedes any armed expiry timer.
        if self._cancel_update:
            self._cancel_update()
            self._cancel_update = None
        if args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_ON:
            endtime = parse_datetime(args[0][0][KEY_RAIN_DELAY_END])
            _LOGGER.debug("Rain delay expires at %s", endtime)
            self._state = True
            assert endtime is not None
            # Arm a timer to flip the switch off when the delay lapses.
            self._cancel_update = async_track_point_in_utc_time(
                self.hass, self._delay_expiration, endtime
            )
        elif args[0][0][KEY_SUBTYPE] == SUBTYPE_RAIN_DELAY_OFF:
            self._state = False
        self.async_write_ha_state()
    @callback
    def _delay_expiration(self, *args) -> None:
        """Trigger when a rain delay expires."""
        self._state = False
        self._cancel_update = None
        self.async_write_ha_state()
    def turn_on(self, **kwargs) -> None:
        """Activate a 24 hour rain delay on the controller."""
        # 86400 seconds == 24 hours.
        self._controller.rachio.device.rain_delay(self._controller.controller_id, 86400)
        _LOGGER.debug("Starting rain delay for 24 hours")
    def turn_off(self, **kwargs) -> None:
        """Resume controller functionality."""
        self._controller.rachio.device.rain_delay(self._controller.controller_id, 0)
        _LOGGER.debug("Canceling rain delay")
    async def async_added_to_hass(self):
        """Subscribe to updates."""
        if KEY_RAIN_DELAY in self._controller.init_data:
            # init_data reports the delay end in epoch milliseconds; the
            # comparison against "now" is done in seconds.
            self._state = self._controller.init_data[
                KEY_RAIN_DELAY
            ] / 1000 > as_timestamp(now())
            # If the controller was in a rain delay state during a reboot, this re-sets the timer
            if self._state is True:
                delay_end = utc_from_timestamp(
                    self._controller.init_data[KEY_RAIN_DELAY] / 1000
                )
                _LOGGER.debug("Re-setting rain delay timer for %s", delay_end)
                self._cancel_update = async_track_point_in_utc_time(
                    self.hass, self._delay_expiration, delay_end
                )
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass,
                SIGNAL_RACHIO_RAIN_DELAY_UPDATE,
                self._async_handle_any_update,
            )
        )
class RachioZone(RachioSwitch):
    """Representation of one zone of sprinklers connected to the Rachio Iro."""
    def __init__(self, person, controller, data, current_schedule):
        """Initialize a new Rachio Zone."""
        self.id = data[KEY_ID]
        self._zone_name = data[KEY_NAME]
        self._zone_number = data[KEY_ZONE_NUMBER]
        self._zone_enabled = data[KEY_ENABLED]
        self._entity_picture = data.get(KEY_IMAGE_URL)
        self._person = person
        # Optional customization fields; absent keys yield None.
        self._shade_type = data.get(KEY_CUSTOM_SHADE, {}).get(KEY_NAME)
        self._zone_type = data.get(KEY_CUSTOM_CROP, {}).get(KEY_NAME)
        self._slope_type = data.get(KEY_CUSTOM_SLOPE, {}).get(KEY_NAME)
        self._summary = ""
        self._current_schedule = current_schedule
        super().__init__(controller)
    def __str__(self):
        """Display the zone as a string."""
        return f'Rachio Zone "{self.name}" on {str(self._controller)}'
    @property
    def zone_id(self) -> str:
        """How the Rachio API refers to the zone."""
        return self.id
    @property
    def name(self) -> str:
        """Return the friendly name of the zone."""
        return self._zone_name
    @property
    def unique_id(self) -> str:
        """Return a unique id by combining controller id and zone number."""
        return f"{self._controller.controller_id}-zone-{self.zone_id}"
    @property
    def icon(self) -> str:
        """Return the icon to display."""
        return "mdi:water"
    @property
    def zone_is_enabled(self) -> bool:
        """Return whether the zone is allowed to run."""
        return self._zone_enabled
    @property
    def entity_picture(self):
        """Return the entity picture to use in the frontend, if any."""
        return self._entity_picture
    @property
    def extra_state_attributes(self) -> dict:
        """Return the optional state attributes."""
        props = {ATTR_ZONE_NUMBER: self._zone_number, ATTR_ZONE_SUMMARY: self._summary}
        if self._shade_type:
            props[ATTR_ZONE_SHADE] = self._shade_type
        if self._zone_type:
            props[ATTR_ZONE_TYPE] = self._zone_type
        if self._slope_type:
            # Map API slope constants to human-readable labels.
            if self._slope_type == SLOPE_FLAT:
                props[ATTR_ZONE_SLOPE] = "Flat"
            elif self._slope_type == SLOPE_SLIGHT:
                props[ATTR_ZONE_SLOPE] = "Slight"
            elif self._slope_type == SLOPE_MODERATE:
                props[ATTR_ZONE_SLOPE] = "Moderate"
            elif self._slope_type == SLOPE_STEEP:
                props[ATTR_ZONE_SLOPE] = "Steep"
        return props
    def turn_on(self, **kwargs) -> None:
        """Start watering this zone."""
        # Stop other zones first
        self.turn_off()
        # Start this zone
        manual_run_time = timedelta(
            minutes=self._person.config_entry.options.get(
                CONF_MANUAL_RUN_MINS, DEFAULT_MANUAL_RUN_MINS
            )
        )
        # The API limit is 3 hours, and requires an int be passed
        self._controller.rachio.zone.start(self.zone_id, manual_run_time.seconds)
        _LOGGER.debug(
            "Watering %s on %s for %s",
            self.name,
            self._controller.name,
            str(manual_run_time),
        )
    def turn_off(self, **kwargs) -> None:
        """Stop watering all zones."""
        self._controller.stop_watering()
    def set_moisture_percent(self, percent) -> None:
        """Set the zone moisture percent."""
        _LOGGER.debug("Setting %s moisture to %s percent", self._zone_name, percent)
        # The service accepts 0..100; the API call takes a 0..1 fraction.
        self._controller.rachio.zone.set_moisture_percent(self.id, percent / 100)
    @callback
    def _async_handle_update(self, *args, **kwargs) -> None:
        """Handle incoming webhook zone data."""
        # Connected directly to the zone signal, so the payload is args[0].
        if args[0][KEY_ZONE_ID] != self.zone_id:
            return
        self._summary = args[0][KEY_SUMMARY]
        if args[0][KEY_SUBTYPE] == SUBTYPE_ZONE_STARTED:
            self._state = True
        elif args[0][KEY_SUBTYPE] in [
            SUBTYPE_ZONE_STOPPED,
            SUBTYPE_ZONE_COMPLETED,
            SUBTYPE_ZONE_PAUSED,
        ]:
            self._state = False
        self.async_write_ha_state()
    async def async_added_to_hass(self):
        """Subscribe to updates."""
        # Initially on only if this zone is the one currently running.
        self._state = self.zone_id == self._current_schedule.get(KEY_ZONE_ID)
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, SIGNAL_RACHIO_ZONE_UPDATE, self._async_handle_update
            )
        )
class RachioSchedule(RachioSwitch):
    """Representation of one fixed schedule on the Rachio Iro."""
    def __init__(self, person, controller, data, current_schedule):
        """Initialize a new Rachio Schedule."""
        self._schedule_id = data[KEY_ID]
        self._schedule_name = data[KEY_NAME]
        self._duration = data[KEY_DURATION]
        self._schedule_enabled = data[KEY_ENABLED]
        self._summary = data[KEY_SUMMARY]
        # Schedules without an explicit type are treated as fixed.
        self.type = data.get(KEY_TYPE, SCHEDULE_TYPE_FIXED)
        self._current_schedule = current_schedule
        super().__init__(controller)
    @property
    def name(self) -> str:
        """Return the friendly name of the schedule."""
        return f"{self._schedule_name} Schedule"
    @property
    def unique_id(self) -> str:
        """Return a unique id by combining controller id and schedule."""
        return f"{self._controller.controller_id}-schedule-{self._schedule_id}"
    @property
    def icon(self) -> str:
        """Return the icon to display."""
        return "mdi:water" if self.schedule_is_enabled else "mdi:water-off"
    @property
    def extra_state_attributes(self) -> dict:
        """Return the optional state attributes."""
        return {
            ATTR_SCHEDULE_SUMMARY: self._summary,
            ATTR_SCHEDULE_ENABLED: self.schedule_is_enabled,
            # Duration is exposed in whole minutes (stored value / 60).
            ATTR_SCHEDULE_DURATION: f"{round(self._duration / 60)} minutes",
            ATTR_SCHEDULE_TYPE: self.type,
        }
    @property
    def schedule_is_enabled(self) -> bool:
        """Return whether the schedule is allowed to run."""
        return self._schedule_enabled
    def turn_on(self, **kwargs) -> None:
        """Start this schedule."""
        self._controller.rachio.schedulerule.start(self._schedule_id)
        _LOGGER.debug(
            "Schedule %s started on %s",
            self.name,
            self._controller.name,
        )
    def turn_off(self, **kwargs) -> None:
        """Stop watering all zones."""
        self._controller.stop_watering()
    @callback
    def _async_handle_update(self, *args, **kwargs) -> None:
        """Handle incoming webhook schedule data."""
        # Schedule ID not passed when running individual zones, so we catch that error
        with suppress(KeyError):
            if args[0][KEY_SCHEDULE_ID] == self._schedule_id:
                if args[0][KEY_SUBTYPE] in [SUBTYPE_SCHEDULE_STARTED]:
                    self._state = True
                elif args[0][KEY_SUBTYPE] in [
                    SUBTYPE_SCHEDULE_STOPPED,
                    SUBTYPE_SCHEDULE_COMPLETED,
                ]:
                    self._state = False
                self.async_write_ha_state()
    async def async_added_to_hass(self):
        """Subscribe to updates."""
        # Initially on only if this schedule is the one currently running.
        self._state = self._schedule_id == self._current_schedule.get(KEY_SCHEDULE_ID)
        self.async_on_remove(
            async_dispatcher_connect(
                self.hass, SIGNAL_RACHIO_SCHEDULE_UPDATE, self._async_handle_update
            )
        )
|
rohitranjan1991/home-assistant
|
homeassistant/components/rachio/switch.py
|
Python
|
mit
| 19,041
|
import os
from twisted.trial import unittest
from lisa.server.plugins.PluginManager import PluginManagerSingleton
class LisaPluginTestCase(unittest.TestCase):
    """End-to-end tests for the PluginManager plugin lifecycle.

    The alphabetic method-name prefixes (a, aa, b, bb, ...) exploit
    unittest's default alphabetical ordering so the lifecycle runs
    install -> disable -> enable -> upgrade -> load -> method list ->
    create -> uninstall, with each "fail" test repeating the preceding
    operation to assert it is rejected the second time.
    """
    def setUp(self):
        # Singleton: state carries over from one test method to the next.
        self.pluginManager = PluginManagerSingleton.get()
    def test_a_install_plugin_ok(self):
        answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True, version='0.1.6')
        self.assertEqual(answer['status'], "success")
    def test_aa_install_plugin_fail(self):
        # Second install of the same plugin is expected to fail.
        answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True)
        self.assertEqual(answer['status'], "fail")
    def test_b_disable_plugin_ok(self):
        answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "success")
    def test_bb_disable_plugin_fail(self):
        # Disabling an already-disabled plugin is expected to fail.
        answer = self.pluginManager.disablePlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "fail")
    def test_c_enable_plugin_ok(self):
        answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "success")
    def test_cc_enable_plugin_fail(self):
        # Enabling an already-enabled plugin is expected to fail.
        answer = self.pluginManager.enablePlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "fail")
    def test_d_upgrade_plugin_ok(self):
        answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
        self.assertEqual(answer['status'], "success")
    def test_dd_upgrade_plugin_fail(self):
        # Upgrading again with no newer version is expected to fail.
        answer = self.pluginManager.upgradePlugin(plugin_name="UnitTest", test_mode=True)
        self.assertEqual(answer['status'], "fail")
    def test_e_load_plugin(self):
        answer = self.pluginManager.loadPlugins()
        test_list = ['UnitTest']
        self.assertListEqual(answer, test_list)
    def test_f_methodList_plugin(self):
        answer = self.pluginManager.methodListPlugin()
        methodlist = [{'methods': ['test'], 'plugin': u'UnitTest'}, {'core': 'intents', 'methods': ['list']}]
        self.assertListEqual(answer, methodlist)
    def test_g_create_plugin(self):
        # createPlugin needs Django settings configured to scaffold files.
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lisa.server.web.weblisa.settings")
        answer = self.pluginManager.createPlugin(plugin_name="TestPlugin", author_name="TestAuthor",
                                                 author_email="test@test.com")
        self.assertEqual(answer['status'], "success")
    def test_h_uninstall_plugin(self):
        answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "success")
    def test_hh_uninstall_plugin(self):
        # Uninstalling an already-removed plugin is expected to fail.
        answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest")
        self.assertEqual(answer['status'], "fail")
|
Seraf/LISA
|
lisa/server/tests/test_plugins.py
|
Python
|
mit
| 2,790
|
# -*- coding: utf-8 -*-
class Item(object):
    """A list entry with a unique id, a display name and a time string."""

    def __init__(self, uid, name, time):
        self._uid = uid
        self._name = name
        self._time = time

    @property
    def name(self):
        """The item's display name."""
        return self._name

    @property
    def uid(self):
        """The item's unique identifier."""
        return self._uid

    def display(self, size):
        """Render the item as a string exactly *size* characters wide.

        The name is left-aligned and the time right-aligned; the name is
        truncated when the two do not both fit.
        """
        padding = size - len(self._name) - len(self._time) - 1
        if padding >= 0:
            return self._name + ' ' * (padding + 1) + self._time
        # Not enough room: keep the time, truncate the name.
        return self._name[:size - len(self._time) - 1] + ' ' + self._time
class ItemList(object):
    """Scrollable list of items with a wrapping cursor and a selection set."""

    def __init__(self):
        self.clear()

    def clear(self):
        """Drop all items and reset cursor, scroll offset and selection."""
        self._items = []
        self._offset = 0
        self._position = 0
        self._selected = []

    def add(self, item):
        """Append *item* to the end of the list."""
        self._items.append(item)

    def go_up(self):
        # Wraps from the first item to the last.
        self._position = (self._position - 1) % len(self._items)

    def go_down(self):
        # Wraps from the last item to the first.
        self._position = (self._position + 1) % len(self._items)

    def go_top(self):
        self._position = 0

    def go_bottom(self):
        self._position = len(self._items) - 1

    def is_empty(self):
        """True when the list holds no items."""
        return not self._items

    def get_current_uid(self):
        """uid of the item under the cursor."""
        return self._items[self._position].uid

    def _compute_offset(self, max_len):
        # Scroll just enough to keep the cursor inside the visible window.
        if self._position < self._offset:
            self._offset = self._position
        elif self._position - self._offset > max_len - 1:
            self._offset = self._position - max_len + 1

    def visible_items(self, max_len):
        """Return the window of at most *max_len* items around the cursor."""
        self._compute_offset(max_len)
        start = self._offset
        return self._items[start:start + max_len]

    def select(self):
        """Toggle the selection state of the item under the cursor."""
        pos = self._position
        if pos in self._selected:
            self._selected.remove(pos)
        else:
            self._selected.append(pos)

    def unselect_all(self):
        self._selected = []

    def has_selection(self):
        """True when at least one item is selected."""
        return bool(self._selected)

    def selected_items(self):
        """Yield the selected items in selection order."""
        for idx in self._selected:
            yield self._items[idx]

    def position_item(self):
        """The item under the cursor."""
        return self._items[self._position]

    def is_selected(self, i, offset=True):
        """True if screen row *i* (or absolute index, offset=False) is selected."""
        return (i + self._offset if offset else i) in self._selected

    def is_position(self, i, offset=True):
        """True if screen row *i* (or absolute index, offset=False) is the cursor."""
        return (i + self._offset if offset else i) == self._position
class Playlist(object):
    """Ordered play queue with a 'now playing' index and a scroll window."""

    def __init__(self, space=1):
        # Number of context items kept visible around the playing one.
        self._space = space
        self.clear()

    def clear(self):
        """Empty the queue and reset playback position and scrolling."""
        self._list = []
        self._iplay = 0
        self._offset = 0

    def add(self, item):
        """Append *item* to the queue."""
        self._list.append(item)

    def is_over(self):
        """True once playback has moved past the last item."""
        return self._iplay >= len(self._list)

    def current_uid(self):
        """uid of the item currently playing."""
        return self._list[self._iplay].uid

    def next(self, step=1, secure=True):
        """Advance by *step*; with secure=True clamp at one-past-the-end."""
        self._iplay += step
        if secure:
            self._iplay = min(self._iplay, len(self._list))

    def previous(self, step=1, secure=True):
        """Go back by *step*; with secure=True clamp at the first item."""
        self._iplay -= step
        if secure:
            self._iplay = max(self._iplay, 0)

    def _compute_offset(self, max_len):
        # Keep the playing item visible with `_space` items of context.
        low = self._iplay - self._space
        if low < self._offset:
            self._offset = max(0, low)
        elif self._iplay - self._offset > max_len - self._space - 1:
            self._offset = min(len(self._list) - max_len,
                               self._iplay - max_len + self._space + 1)

    def visible_items(self, max_len):
        """Return the window of at most *max_len* items around the playing one."""
        self._compute_offset(max_len)
        start = self._offset
        return self._list[start:start + max_len]

    def is_current(self, i, offset=True):
        """True if screen row *i* (or absolute index, offset=False) is playing."""
        return (i + self._offset if offset else i) == self._iplay
|
NiZiL/clitube
|
clitube/model.py
|
Python
|
mit
| 3,663
|
from marshmallow import Schema, fields, post_load, EXCLUDE
from ..resource import Resource
from collections import namedtuple
class Plan(Resource):
    """
    https://dev.chartmogul.com/v1.0/reference#plans
    """
    # REST endpoint template; {/uuid} is expanded for detail requests.
    _path = "/plans{/uuid}"
    # Key under which list responses nest the plan records.
    _root_key = 'plans'
    # Container type for paginated list results.
    _many = namedtuple('Plans', [_root_key, "current_page", "total_pages"])

    class _Schema(Schema):
        # marshmallow schema describing one plan record as returned by the API
        uuid = fields.String()
        data_source_uuid = fields.String()
        name = fields.String()
        interval_count = fields.Int()
        interval_unit = fields.String()
        external_id = fields.String()

        @post_load
        def make(self, data, **kwargs):
            # Turn the deserialized dict into a Plan instance.
            return Plan(**data)

    # Unknown fields in API payloads are silently dropped.
    _schema = _Schema(unknown=EXCLUDE)
|
chartmogul/chartmogul-python
|
chartmogul/api/plan.py
|
Python
|
mit
| 727
|
import os
import os.path
from django.db import transaction
from bibliotik import manage_bibliotik
from bibliotik.models import BibliotikTransTorrent, BibliotikTorrent
from home.models import LogEntry, DownloadLocation
def sync_instance_db(instance):
    """Synchronise the DB's Bibliotik torrents with one Transmission instance.

    Deletes DB records for torrents that disappeared from the client
    (removing now-empty download directories on the master replica) and
    refreshes the remaining records from the client's current data.
    """
    b_torrents = instance.get_b_torrents_by_hash()
    t_torrents = instance.get_t_torrents_by_hash(BibliotikTransTorrent.sync_t_arguments)

    # BUGFIX: iterate over a snapshot -- entries are deleted from
    # b_torrents inside the loop, which raises RuntimeError when
    # iterating a live dict view on Python 3.
    for c_hash, b_torrent in list(b_torrents.items()):
        if c_hash not in t_torrents:
            b_torrent_path = b_torrent.path.encode('utf-8')
            messages = []
            with transaction.atomic():
                b_torrent.delete()
                del b_torrents[c_hash]
            # Only the master replica owns the data on disk.
            if instance.replica_set.is_master:
                if os.path.exists(b_torrent_path):
                    files = os.listdir(b_torrent_path)
                    if len(files):
                        messages.append(u'There are other files so leaving in place.')
                    else:
                        messages.append(u'No other files. Deleting directory.')
                        os.rmdir(b_torrent_path)
                else:
                    messages.append(u'Path does not exist.')
            LogEntry.add(None, u'action',
                         u'Bibliotik torrent {0} deleted from instance {1}. {2}'
                         .format(b_torrent, instance, ' '.join(messages)))

    with transaction.atomic():
        for c_hash, t_torrent in t_torrents.items():
            if c_hash not in b_torrents:
                # A torrent is in the client but unknown to the DB; log and
                # stop syncing (matches the original control flow).
                LogEntry.add(None, u'error',
                             u'Bibliotik torrent {0} appeared in instance {1}.'
                             .format(t_torrent.name, instance))
                break
            else:
                b_torrent = b_torrents[c_hash]
                b_torrent.sync_t_torrent(t_torrent)
def sync_all_instances_db(replica_set):
    """Run :func:`sync_instance_db` for every instance of *replica_set*."""
    for trans_instance in replica_set.transinstance_set.all():
        sync_instance_db(trans_instance)
def init_sync_instance_db(instance):
    # Initial sync: create a BibliotikTransTorrent DB row for every torrent
    # already present in the Transmission instance, then refresh all rows
    # from the client data. Everything runs inside one transaction so a
    # failure leaves the DB untouched.
    b_torrents = instance.get_b_torrents_by_hash()
    t_torrents = instance.get_t_torrents_by_hash(BibliotikTransTorrent.sync_t_arguments)
    with transaction.atomic():
        for c_hash, t_torrent in t_torrents.items():
            if c_hash not in b_torrents:
                try:
                    bibliotik_torrent = BibliotikTorrent.objects.get(info_hash=c_hash)
                    d_location = DownloadLocation.get_by_full_path(t_torrent.downloadDir)
                    # Register in the DB only; the torrent is already in the client.
                    b_torrent = manage_bibliotik.add_bibliotik_torrent(
                        bibliotik_torrent.id,
                        instance,
                        d_location,
                        add_to_client=False
                    )
                    b_torrents[b_torrent.info_hash] = b_torrent
                except BibliotikTorrent.DoesNotExist:
                    raise Exception(u'Could not find hash {0} for name {1} in '
                                    u'DB during initial sync.'
                                    .format(c_hash, t_torrent.name))
            b_torrent = b_torrents[c_hash]
            b_torrent.sync_t_torrent(t_torrent)
def init_sync_all_instances_db(replica_set):
    """Run :func:`init_sync_instance_db` for every instance of *replica_set*."""
    for trans_instance in replica_set.transinstance_set.all():
        init_sync_instance_db(trans_instance)
|
davols/WhatManager2
|
bibliotik/trans_sync.py
|
Python
|
mit
| 3,346
|
# Copyright (c) 2012 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""change the edges in loopdaigram so that there are no names with colons (:) """
def replace_colon(s, replacewith='__'):
    """Return *s* with every colon replaced by *replacewith*."""
    return s.replace(":", replacewith)


def clean_edges(arg):
    """Recursively replace colons in a (possibly nested) edge structure.

    Strings are cleaned directly; other iterables become nested tuples
    with every contained string cleaned; non-iterable values fall through
    to ``replace_colon`` unchanged in behaviour.
    """
    # BUGFIX: `basestring` only exists on Python 2, so this function raised
    # NameError on Python 3. Fall back to `str` when it is missing.
    try:
        string_types = basestring  # noqa: F821 -- Python 2 only
    except NameError:
        string_types = str
    if isinstance(arg, string_types):
        return replace_colon(arg)
    try:
        return tuple(clean_edges(x) for x in arg)
    except TypeError:  # catch when for loop fails
        return replace_colon(arg)  # not a sequence so just return repr
# start pytests +++++++++++++++++++++++
def test_replace_colon():
    """py.test for replace_colon"""
    cases = (
        ("zone:aap", '@', "zone@aap"),  # s, replacement, expected
    )
    for original, replacement, expected in cases:
        assert replace_colon(original, replacement) == expected
def test_cleanedges():
    """py.test for cleanedges"""
    cases = (
        ([('a:a', 'a'), (('a', 'a'), 'a:a'), ('a:a', ('a', 'a'))],
         (('a__a', 'a'), (('a', 'a'), 'a__a'), ('a__a', ('a', 'a')))),
        # edg, clean_edg
    )
    for edges, expected in cases:
        assert clean_edges(edges) == expected
# end pytests +++++++++++++++++++++++
|
pachi/eppy
|
eppy/useful_scripts/change_edges.py
|
Python
|
mit
| 1,491
|
#encoding:utf-8
from utils import weighted_random_subreddit
# Telegram channel this bot posts to.
t_channel = '@pythondaily'

# Weighted pool of subreddits to pull submissions from; a higher weight
# makes the subreddit more likely to be chosen on each run.
subreddit = weighted_random_subreddit({
    'flask': 3,
    'Python': 6,
    'django': 4,
    'MachineLearning': 1,
    'djangolearning': 1,
    'IPython': 5,
    'pystats': 4,
    'JupyterNotebooks': 3
})
def send_post(submission, r2t):
    """Forward a reddit submission to Telegram via *r2t*.

    Text posts are truncated to their first 200 space-separated tokens;
    the remaining templates are filled in by the r2t helper.
    """
    snippet = ' '.join(submission.selftext.split(' ')[:200])
    return r2t.send_simple(
        submission,
        text='{title}\n\n{words}\n\n/r/{subreddit_name}\n{short_link}',
        gif='{title}\n\n/r/{subreddit_name}\n{short_link}',
        img='{title}\n\n/r/{subreddit_name}\n{short_link}',
        album='{title}\n{link}\n\n/r/{subreddit_name}\n{short_link}',
        other='{title}\n{link}\n\n/r/{subreddit_name}\n{short_link}',
        words=snippet
    )
|
Fillll/reddit2telegram
|
reddit2telegram/channels/pythondaily/app.py
|
Python
|
mit
| 822
|
# Third-party
import numpy as np
import astropy.units as u
import astropy.coordinates as coord
from astropy.coordinates import frame_transform_graph
from astropy.coordinates.matrix_utilities import matrix_transpose
__all__ = ["JhelumBonaca19"]
class JhelumBonaca19(coord.BaseCoordinateFrame):
    """
    A Heliocentric spherical coordinate system defined by the orbit
    of the Jhelum stream, as described in
    Bonaca et al. 2019.

    For more information about this class, see the Astropy documentation
    on coordinate frames in :mod:`~astropy.coordinates`.

    Parameters
    ----------
    representation : :class:`~astropy.coordinates.BaseRepresentation` or None
        A representation object or None to have no data (or use the other keywords)
    phi1 : angle_like, optional, must be keyword
        The longitude-like angle aligned with the stream.
    phi2 : angle_like, optional, must be keyword
        The latitude-like angle aligned perpendicular to the stream.
    distance : :class:`~astropy.units.Quantity`, optional, must be keyword
        The Distance for this object along the line-of-sight.
    pm_phi1_cosphi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
        The proper motion in the longitude-like direction corresponding to
        the Jhelum stream's orbit.
    pm_phi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
        The proper motion in the latitude-like direction perpendicular to the
        Jhelum stream's orbit.
    radial_velocity : :class:`~astropy.units.Quantity`, optional, must be keyword
        The Distance for this object along the line-of-sight.
    """
    default_representation = coord.SphericalRepresentation
    default_differential = coord.SphericalCosLatDifferential

    # Map the generic spherical components onto stream-aligned names.
    frame_specific_representation_info = {
        coord.SphericalRepresentation: [
            coord.RepresentationMapping('lon', 'phi1'),
            coord.RepresentationMapping('lat', 'phi2'),
            coord.RepresentationMapping('distance', 'distance')],
    }

    _default_wrap_angle = 180*u.deg

    def __init__(self, *args, **kwargs):
        wrap = kwargs.pop('wrap_longitude', True)
        super().__init__(*args, **kwargs)
        # Wrap phi1 at 180 deg so the stream track is contiguous.
        if wrap and isinstance(self._data, (coord.UnitSphericalRepresentation,
                                            coord.SphericalRepresentation)):
            self._data.lon.wrap_angle = self._default_wrap_angle

    # TODO: remove this. This is a hack required as of astropy v3.1 in order
    # to have the longitude components wrap at the desired angle
    def represent_as(self, base, s='base', in_frame_units=False):
        r = super().represent_as(base, s=s, in_frame_units=in_frame_units)
        if hasattr(r, "lon"):
            r.lon.wrap_angle = self._default_wrap_angle
        return r
    represent_as.__doc__ = coord.BaseCoordinateFrame.represent_as.__doc__
# Rotation matrix as defined in Bonaca+2019
# (used by the ICRS <-> Jhelum static matrix transforms below).
R = np.array([[0.6173151074, -0.0093826715, -0.7866600433],
              [-0.0151801852, -0.9998847743, 0.0000135163],
              [-0.7865695266, 0.0119333013, -0.6173864075]])
@frame_transform_graph.transform(coord.StaticMatrixTransform, coord.ICRS,
                                 JhelumBonaca19)
def icrs_to_jhelum():
    """ Compute the transformation from ICRS spherical to
        heliocentric Jhelum coordinates.
    """
    # Static rotation from Bonaca+2019 (see R above); registering it on the
    # transform graph lets SkyCoord convert between the frames automatically.
    return R
@frame_transform_graph.transform(coord.StaticMatrixTransform, JhelumBonaca19,
                                 coord.ICRS)
def gd1_to_icrs():
    """ Compute the transformation from heliocentric Jhelum coordinates to
        spherical ICRS.
    """
    # NOTE(review): the function name looks copy-pasted from the GD-1
    # module -- this is the Jhelum -> ICRS transform. Renaming it would
    # change the module's public API, so it is only flagged here.
    return matrix_transpose(icrs_to_jhelum())
|
adrn/gala
|
gala/coordinates/jhelum.py
|
Python
|
mit
| 3,688
|
import optparse
class CLI:
    """Namespace holding the shared option parser and the parsed arguments."""
    pass

# NOTE(review): optparse has been deprecated since Python 3.2 in favour of
# argparse; left in place to avoid changing the module's interface.
CLI.parser = optparse.OptionParser()
CLI.parser.add_option("-q", "--queries", dest="queries", help="Queries csv file", metavar="FILE")
CLI.parser.add_option("-l", "--logs", dest="logs", help="Path to log files containing directory", metavar="DIR")
CLI.parser.add_option("-r", "--results", dest="results",
                      help="Path to result files containing directory", metavar="DIR")
CLI.parser.add_option("-g", "--gains", dest="gains",
                      help="Comma-separated list of gains for different relevance levels, eg. 0,1,10", metavar="LIST")
CLI.parser.add_option("-s", "--serp-len", dest="serp_len",
                      help="Number of results on a single Search Engine Result Page [default: %default]",
                      default=10, metavar="N")
CLI.parser.add_option("-c", action="store_true", dest="use_combined_log_parser", help="Use combined log parser")
CLI.parser.add_option("-a", action="store_true", dest="use_alt_log_format", help="Use alternative log format")
# Parsed at import time; parsedArgs is the (options, args) tuple.
CLI.parsedArgs = CLI.parser.parse_args()
|
fire-uta/iiix-data-parser
|
cli.py
|
Python
|
mit
| 1,078
|
import re
from main import sc
__author__ = 'minh'
class Utils:
    """Static helpers shared by the semantic typing pipeline."""

    def __init__(self):
        pass

    # Characters that may not appear in generated index names.
    not_allowed_chars = '[\/*?"<>|\s\t]'

    # Accepts optionally-negative integers, decimals, thousands-grouped
    # numbers and scientific notation. BUGFIX: the original pattern used
    # "\\-" and "\\." inside raw strings, i.e. the regex required a
    # literal backslash before every minus sign and decimal point, so
    # inputs such as "-5" and "3.14" were rejected.
    numeric_regex = r"\A((-)?[0-9]{1,3}(,[0-9]{3})+(\.[0-9]+)?)|((-)?[0-9]*\.[0-9]+)|((-)?[0-9]+)|((-)?[0" \
                    r"-9]*\.?[0-9]+([eE][-+]?[0-9]+)?)\Z"

    @staticmethod
    def is_number(example):
        """Return True if *example* (after stripping) is entirely numeric."""
        matches = re.match(Utils.numeric_regex, example.strip())
        # The span check guards against partial matches: \A and \Z only
        # anchor the first and last alternatives of the pattern.
        if matches and matches.span()[1] == len(example.strip()):
            return True
        return False

    @staticmethod
    def clean_examples_numeric(examples):
        """Keep only numeric examples, converted to float (Spark job)."""
        return sc.parallelize(examples).map(lambda x: float(x) if Utils.is_number(x) else "").filter(
            lambda x: x).collect()

    @staticmethod
    def get_distribution(data):
        """Build a percentage-scaled index distribution of *data* (Spark job)."""
        # NOTE(review): flatMap is given a two-argument lambda here, but
        # Spark calls it with a single element -- this likely fails at
        # runtime and needs tuple unpacking; verify against callers.
        return sc.parallelize(data).map(lambda word: (word, 1)).reduceByKey(lambda a, b: a + b).sortBy(
            lambda x: x).zipWithIndex().flatMap(lambda value, idx: [str(idx)] * int(value/len(data) * 100))

    @staticmethod
    def get_index_name(index_config):
        """Build the '<name>!<size>' index identifier from a config dict."""
        return "%s!%s" % (index_config['name'], index_config['size'])
|
alseambusher/SemanticTyping
|
lib/utils.py
|
Python
|
mit
| 1,139
|
from avatar.templatetags.avatar_tags import avatar_url
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from tastypie import fields
from tastypie.resources import ModelResource
from accounts.models import UserProfile
from main.api.authentication import UberAuthentication
from main.api.serializers import UberSerializer
class UserResource(ModelResource):
    """Read-only tastypie resource exposing basic user information."""
    #profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True)

    class Meta:
        queryset = User.objects.all()
        authentication = UberAuthentication()
        #authorization = CourseAuthorization()
        resource_name = 'users'
        fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
        # GET only -- this resource never mutates users.
        allowed_methods = ['get']
        include_absolute_url = True
        serializer = UberSerializer()

    def dehydrate(self, bundle):
        """Attach computed, presentation-level fields to each serialized user."""
        bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username})
        bundle.data['best_name'] = bundle.obj.profile.get_best_name()
        # Small avatar sized for the enrollments grid view.
        bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID)
        return bundle
class UserProfileResource(ModelResource):
    """Tastypie resource for extended user profile records."""

    class Meta:
        queryset = UserProfile.objects.all()
        authentication = UberAuthentication()
        resource_name = 'profiles'
|
Uberlearner/uberlearner
|
uberlearner/accounts/api/api.py
|
Python
|
mit
| 1,436
|
from csacompendium.indicators.models import ResearchOutcomeIndicator
from csacompendium.utils.pagination import APILimitOffsetPagination
from csacompendium.utils.permissions import IsOwnerOrReadOnly
from csacompendium.utils.viewsutils import DetailViewUpdateDelete, get_http_request
from rest_framework.filters import DjangoFilterBackend
from rest_framework.generics import CreateAPIView, ListAPIView
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from .filters import ResearchOutcomeIndicatorListFilter
from csacompendium.indicators.api.serializers import research_outcome_indicator_serializers
def research_outcome_indicator_views():
    """
    Research outcome indicator views
    :return: All research outcome indicator views
    :rtype: Object
    """
    # Factory pattern: the DRF view classes are defined inside this
    # function and returned in a dict so URL configuration can look
    # them up by name.
    class ResearchOutcomeIndicatorCreateAPIView(CreateAPIView):
        """
        Creates a single record.
        """
        queryset = ResearchOutcomeIndicator.objects.all()
        permission_classes = [IsAuthenticated]

        def get_serializer_class(self):
            """
            Gets serializer class
            :return: Research outcome indicator object
            :rtype: Object
            """
            # The serializer is built per-request from the HTTP context.
            model_type, url_parameter, user = get_http_request(self.request, slug=False)
            create_research_outcome_indicator_serializer = research_outcome_indicator_serializers[
                'create_research_outcome_indicator_serializer'
            ]
            return create_research_outcome_indicator_serializer(model_type, url_parameter, user)

    class ResearchOutcomeIndicatorListAPIView(ListAPIView):
        """
        API list view. Gets all records API.
        """
        queryset = ResearchOutcomeIndicator.objects.all()
        serializer_class = research_outcome_indicator_serializers['ResearchOutcomeIndicatorListSerializer']
        filter_backends = (DjangoFilterBackend,)
        filter_class = ResearchOutcomeIndicatorListFilter
        pagination_class = APILimitOffsetPagination

    class ResearchOutcomeIndicatorDetailAPIView(DetailViewUpdateDelete):
        """
        Creates, deletes and updates a record.
        """
        queryset = ResearchOutcomeIndicator.objects.all()
        serializer_class = research_outcome_indicator_serializers['ResearchOutcomeIndicatorDetailSerializer']
        # Mutating existing records requires an admin account.
        permission_classes = [IsAuthenticated, IsAdminUser]
        lookup_field = 'pk'

    return {
        'ResearchOutcomeIndicatorCreateAPIView': ResearchOutcomeIndicatorCreateAPIView,
        'ResearchOutcomeIndicatorListAPIView': ResearchOutcomeIndicatorListAPIView,
        'ResearchOutcomeIndicatorDetailAPIView': ResearchOutcomeIndicatorDetailAPIView
    }
|
nkoech/csacompendium
|
csacompendium/indicators/api/researchoutcomeindicator/researchoutcomeindicatorviews.py
|
Python
|
mit
| 2,693
|
# -*- coding: utf-8 -*-
"""
Terrain module providing step overlapping data containers
"""
import threading
# Thread-local container shared by the steps of a scenario run.
world = threading.local()  # pylint: disable=invalid-name


def pick(func):
    """
    Picks the given function and add it to the world object
    """
    # Registered under the function's own name, so steps can later call
    # it as ``world.<name>(...)``.
    setattr(world, func.__name__, func)
    return func

# Expose the decorator itself through the world object.
world.pick = pick
|
SamuelYvon/radish
|
radish/terrain.py
|
Python
|
mit
| 346
|
from goprocam import GoProCamera, constants
# Connect to the camera over its WiFi network.
gpCam = GoProCamera.GoPro()
## Downloads all of the SD card's contents and then formats the sd card.
gpCam.downloadAll()
# "all" removes every file on the SD card -- destructive, no confirmation.
gpCam.delete("all")
|
KonradIT/gopro-py-api
|
examples/dump_sdcard.py
|
Python
|
mit
| 186
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django entry point: point at this project's settings module
    # (unless the environment already set one), then hand the CLI
    # arguments to Django's management command dispatcher.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "badguys.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
|
mpirnat/lets-be-bad-guys
|
manage.py
|
Python
|
mit
| 250
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
#======================================================================
#
# playsnd.py - play sound with ctypes + mci
#
# Created by skywind on 2013/12/01
# Last change: 2014/01/26 23:40:20
#
#======================================================================
from __future__ import print_function
import sys
import time
import os
import ctypes
import threading
#----------------------------------------------------------------------
# 2/3 compatible
#----------------------------------------------------------------------
# On Python 3, alias the removed Python 2 builtins so the rest of the
# module can use long/unicode/xrange on either interpreter.
if sys.version_info[0] >= 3:
    long = int
    unicode = str
    xrange = range
#----------------------------------------------------------------------
# WinMM - Windows player
#----------------------------------------------------------------------
class WinMM (object):
    """Thin wrapper around the Windows MCI string interface (winmm.dll).

    Media files are opened under generated aliases and driven entirely
    through ``mciSendStringW`` commands. Windows-only. Methods report
    failure by returning None/False/-1 rather than raising.
    """

    def __init__ (self, prefix = ''):
        import ctypes.wintypes
        self.__winmm = ctypes.windll.winmm
        self.__mciSendString = self.__winmm.mciSendStringW
        self.__prefix = prefix
        LPCWSTR = ctypes.wintypes.LPCWSTR
        UINT = ctypes.wintypes.UINT
        HANDLE = ctypes.wintypes.HANDLE
        DWORD = ctypes.wintypes.DWORD
        self.__mciSendString.argtypes = [LPCWSTR, LPCWSTR, UINT, HANDLE]
        self.__mciSendString.restype = ctypes.wintypes.DWORD
        self.__mciGetErrorStringW = self.__winmm.mciGetErrorStringW
        self.__mciGetErrorStringW.argtypes = [DWORD, LPCWSTR, UINT]
        self.__mciGetErrorStringW.restype = ctypes.wintypes.BOOL
        # Shared answer buffer for MCI replies; guarded by __lock.
        self.__buffer = ctypes.create_unicode_buffer(2048)
        self.__alias_index = 0
        self.__lock = threading.Lock()

    def mciSendString (self, command, encoding = None):
        """Send an MCI command string.

        Returns the answer text on success, or the nonzero error code
        (as long) on failure.
        """
        if encoding is None:
            encoding = sys.getfilesystemencoding()
        if isinstance(command, bytes):
            command = command.decode(encoding)
        with self.__lock:
            hr = self.__mciSendString(command, self.__buffer, 2048, 0)
            hr = (hr != 0) and long(hr) or self.__buffer.value
        return hr

    def mciGetErrorString (self, error):
        """Translate an MCI error code into its message text (or None)."""
        buffer = self.__buffer
        with self.__lock:
            hr = self.__mciGetErrorStringW(error, buffer, 2048)
            if hr == 0:
                hr = None
            else:
                hr = buffer.value
        return hr

    def open (self, filename, media_type = ''):
        """Open a media file; return its generated alias, or None on failure."""
        if not os.path.exists(filename):
            return None
        filename = os.path.abspath(filename)
        with self.__lock:
            # Generate a unique alias for this handle.
            name = 'media:%s%d'%(self.__prefix, self.__alias_index)
            self.__alias_index += 1
            if self.__alias_index > 0x7fffffff:
                self.__alias_index = 0
        cmd = u'open "%s" alias %s'%(filename, name)
        if media_type:
            cmd = u'open "%s" type %s alias %s'%(filename, media_type, name)
        hr = self.mciSendString(cmd)
        if isinstance(hr, str) or isinstance(hr, unicode):
            return name
        return None

    def close (self, name):
        """Close a previously opened alias; True on success."""
        hr = self.mciSendString(u'close %s'%name)
        if isinstance(hr, unicode) or isinstance(hr, str):
            return True
        return False

    def __get_status (self, name, what):
        # Raw "status <alias> <item>" query; None on failure.
        hr = self.mciSendString(u'status %s %s'%(name, what))
        if isinstance(hr, unicode) or isinstance(hr, str):
            return hr
        return None

    def __get_status_int (self, name, what):
        # Integer status query; -1 on failure.
        hr = self.__get_status(name, what)
        if hr is None:
            return -1
        hr = long(hr)
        return (hr > 0x7fffffff) and hr or int(hr)

    def __mci_no_return (self, cmd):
        # Fire a command whose answer text is irrelevant; True on success.
        hr = self.mciSendString(cmd)
        if isinstance(hr, unicode) or isinstance(hr, str):
            return True
        return False

    def get_length (self, name):
        """Media length in milliseconds (-1 on failure)."""
        return self.__get_status_int(name, 'length')

    def get_position (self, name):
        """Current playback position in milliseconds (-1 on failure)."""
        return self.__get_status_int(name, 'position')

    def get_mode (self, name):
        """Playback mode string (e.g. 'playing', 'stopped') or None."""
        hr = self.__get_status(name, 'mode')
        return hr

    def play (self, name, start = 0, end = -1, wait = False, repeat = False):
        """Start playback between *start* and *end* (ms).

        wait=True blocks until done (and disables repeat); repeat=True
        loops the clip.
        """
        if wait:
            repeat = False
        if start < 0:
            start = 0
        cmd = u'play %s from %d'%(name, start)
        if end >= 0:
            cmd += u' to %d'%end
        if wait:
            cmd += u' wait'
        if repeat:
            cmd += u' repeat'
        return self.__mci_no_return(cmd)

    def stop (self, name):
        """Stop playback; True on success."""
        return self.__mci_no_return(u'stop %s'%name)

    def seek (self, name, position):
        """Seek to *position* in ms; 'end' or a negative value seeks to the end."""
        if isinstance(position, str) or isinstance(position, unicode):
            if position == u'end':
                position = 'end'
            else:
                position = '0'
        elif position < 0:
            position = 'end'
        else:
            position = str(position)
        # BUGFIX: the format string has two placeholders but was applied to
        # `name` alone ("... % name"), which raised TypeError on every call.
        return self.__mci_no_return(u'seek %s to %s'%(name, position))

    def pause (self, name):
        """Pause playback; True on success."""
        return self.__mci_no_return(u'pause %s'%name)

    def resume (self, name):
        """Resume paused playback; True on success."""
        return self.__mci_no_return(u'resume %s'%name)

    def get_volume (self, name):
        """Current volume (-1 on failure)."""
        return self.__get_status_int(name, 'volume')

    def set_volume (self, name, volume):
        """Set the audio volume; True on success."""
        return self.__mci_no_return(u'setaudio %s volume to %s'%(name, volume))

    def is_playing (self, name):
        """True while the alias reports the 'playing' mode."""
        mode = self.get_mode(name)
        if mode is None:
            return False
        if mode != 'playing':
            return False
        return True
#----------------------------------------------------------------------
# main entry
#----------------------------------------------------------------------
def main (args = None):
    """CLI entry point: loop-play the MP3 named in argv[1] until 'q' is pressed.

    Returns 0 on success, 1 for a missing file, 2 when the file cannot
    be opened by MCI. Windows-only (winmm + user32 key polling).
    """
    if args is None:
        args = sys.argv
    args = [n for n in args]
    if len(args) < 2:
        print('usage: playmp3.py [mp3]')
        return 0
    mp3 = args[1]
    if not os.path.exists(mp3):
        print('not find: %s'%mp3)
        return 1
    def ms2time(ms):
        # Format milliseconds as [hh:]mm:ss:ms.
        if ms <= 0: return '00:00:000'
        # BUGFIX: use floor division so the arithmetic stays integral on
        # Python 3 (plain / yields floats there); identical result on Python 2.
        time_sec, ms = ms // 1000, ms % 1000
        time_min, time_sec = time_sec // 60, time_sec % 60
        time_hor, time_min = time_min // 60, time_min % 60
        if time_hor == 0: return '%02d:%02d:%03d'%(time_min, time_sec, ms)
        return '%02d:%02d:%02d:%03d'%(time_hor, time_min, time_sec, ms)
    winmm = WinMM()
    name = winmm.open(mp3)
    if name is None:
        print('can not play: %s'%mp3)
        return 2
    import ctypes.wintypes
    user32 = ctypes.windll.user32
    user32.GetAsyncKeyState.restype = ctypes.wintypes.WORD
    user32.GetAsyncKeyState.argtypes = [ ctypes.c_char ]
    size = winmm.get_length(name)
    print('Playing "%s", press \'q\' to exit ....'%mp3)
    winmm.play(name, repeat = True)
    # Poll the keyboard ~10x per second while refreshing the progress line.
    while 1:
        if user32.GetAsyncKeyState(b'Q'): break
        time.sleep(0.1)
        pos = winmm.get_position(name)
        sys.stdout.write('[%s / %s]\r'%(ms2time(pos), ms2time(size)))
        sys.stdout.flush()
    print('')
    print('stopped')
    winmm.close(name)
    return 0
#----------------------------------------------------------------------
# testing case
#----------------------------------------------------------------------
if __name__ == '__main__':
    # Manual smoke tests -- both need Windows and a local sample MP3.
    def test1():
        # Exercise open/volume/play/stop directly against WinMM.
        winmm = WinMM()
        name = winmm.open('d:/music/sample.mp3')
        print(name)
        print(winmm.get_length(name))
        print(winmm.get_volume(name))
        print(winmm.set_volume(name, 1000))
        ts = time.time()
        print(winmm.play(name))
        ts = time.time() - ts
        print("ts", ts)
        input()
        print('is_playing', winmm.is_playing(name))
        print('position:', winmm.get_position(name))
        print('mode:', winmm.get_mode(name))
        print(winmm.stop(name))
        print('mode:', winmm.get_mode(name))
        return 0
    def test2():
        # Run the full CLI entry point on a sample file.
        main([__file__, 'd:/music/sample.mp3'])
        return 0
    # test2()
    main()
|
skywind3000/collection
|
script/playmp3.py
|
Python
|
mit
| 7,109
|
import sys

# CodeEval challenge 1 (FizzBuzz variant), Python 2.
# Each input line holds three integers "a b n"; print the numbers 1..n
# on one line, replacing multiples of a with "F", multiples of b with
# "B", and multiples of both with "FB".
file_name = sys.argv[1]

with open(file_name) as open_file:
    for line in open_file.readlines():
        a, b, n = map(int, line.split())
        output = ""
        for i in xrange(1, n + 1):
            out = ""
            spacing = " "
            if i == 1:
                # No separator before the first token.
                spacing = ""
            if i % a == 0:
                out += "F"
            if i % b == 0:
                out += "B"
            # An empty `out` is falsy, so plain numbers fall through to str(i).
            output += spacing + (out or str(i))
        print output
|
Widdershin/CodeEval
|
challenges/001-fizzbuzz.py
|
Python
|
mit
| 379
|
# Database connection settings for the course dashboard API (v2).
MYSQL_DB = 'edxapp'  # MySQL schema holding the edX application data
MYSQL_USER = 'root'
MYSQL_PSWD = ''  # NOTE(review): empty root password -- dev-only default? verify before deploying
MONGO_DB = 'edxapp'  # MongoDB database with courseware content
MONGO_DISCUSSION_DB = 'cs_comments_service_development'  # forum/discussion data
|
jaygoswami2303/course_dashboard_api
|
v2/dbv.py
|
Python
|
mit
| 133
|
# coding: utf-8
"""
Gmail
Access Gmail mailboxes including sending user email.
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class UsersApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
    def gmail_users_drafts_create(self, user_id, **kwargs):
        """
        Creates a new draft with the DRAFT label.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_drafts_create(user_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param Draft body:
        :return: Draft
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Convenience wrapper: always ask the transport layer for just the
        # deserialized body, delegating to the *_with_http_info variant.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('callback'):
            return self.gmail_users_drafts_create_with_http_info(user_id, **kwargs)
        else:
            (data) = self.gmail_users_drafts_create_with_http_info(user_id, **kwargs)
            return data
    def gmail_users_drafts_create_with_http_info(self, user_id, **kwargs):
        """
        Creates a new draft with the DRAFT label.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_drafts_create_with_http_info(user_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param Draft body:
        :return: Draft
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject keyword arguments the endpoint does not understand.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_drafts_create" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_create`")

        collection_formats = {}

        # Build path, query and header parameters for the REST call.
        resource_path = '/{userId}/drafts'.replace('{format}', 'json')
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']

        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['message/rfc822'])

        # Authentication setting
        auth_settings = ['Oauth2']

        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Draft',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_drafts_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified draft. Does not simply trash it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_delete_with_http_info(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified draft. Does not simply trash it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_delete`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_get(self, user_id, id, **kwargs):
"""
Gets the specified draft.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the draft in.
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets the specified draft.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the draft in.
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'format']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_get`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'format' in params:
query_params['format'] = params['format']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Draft',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_list(self, user_id, **kwargs):
"""
Lists the drafts in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include drafts from SPAM and TRASH in the results.
:param int max_results: Maximum number of drafts to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return draft messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\".
:return: ListDraftsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_drafts_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_drafts_list_with_http_info(self, user_id, **kwargs):
"""
Lists the drafts in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include drafts from SPAM and TRASH in the results.
:param int max_results: Maximum number of drafts to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return draft messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\".
:return: ListDraftsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'include_spam_trash', 'max_results', 'page_token', 'q']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_list`")
collection_formats = {}
resource_path = '/{userId}/drafts'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'include_spam_trash' in params:
query_params['includeSpamTrash'] = params['include_spam_trash']
if 'max_results' in params:
query_params['maxResults'] = params['max_results']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
if 'q' in params:
query_params['q'] = params['q']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListDraftsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_send(self, user_id, **kwargs):
"""
Sends the specified, existing draft to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_send(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_send_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_drafts_send_with_http_info(user_id, **kwargs)
return data
def gmail_users_drafts_send_with_http_info(self, user_id, **kwargs):
"""
Sends the specified, existing draft to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_send_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_send" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_send`")
collection_formats = {}
resource_path = '/{userId}/drafts/send'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_drafts_update(self, user_id, id, **kwargs):
"""
Replaces a draft's content.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_update(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_drafts_update_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_drafts_update_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_drafts_update_with_http_info(self, user_id, id, **kwargs):
"""
Replaces a draft's content.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_drafts_update_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the draft to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Draft body:
:return: Draft
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_drafts_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_drafts_update`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_drafts_update`")
collection_formats = {}
resource_path = '/{userId}/drafts/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Draft',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_get_profile(self, user_id, **kwargs):
"""
Gets the current user's Gmail profile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_get_profile(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Profile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_get_profile_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_get_profile_with_http_info(user_id, **kwargs)
return data
def gmail_users_get_profile_with_http_info(self, user_id, **kwargs):
"""
Gets the current user's Gmail profile.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_get_profile_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Profile
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_get_profile" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_get_profile`")
collection_formats = {}
resource_path = '/{userId}/profile'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Profile',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_history_list(self, user_id, **kwargs):
"""
Lists the history of all changes to the given mailbox. History results are returned in chronological order (increasing historyId).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_history_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param list[str] history_types: History types to be returned by the function
:param str label_id: Only return messages with a label matching the ID.
:param int max_results: The maximum number of history records to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str start_history_id: Required. Returns history records after the specified startHistoryId. The supplied startHistoryId should be obtained from the historyId of a message, thread, or previous list response. History IDs increase chronologically but are not contiguous with random gaps in between valid IDs. Supplying an invalid or out of date startHistoryId typically returns an HTTP 404 error code. A historyId is typically valid for at least a week, but in some rare circumstances may be valid for only a few hours. If you receive an HTTP 404 error response, your application should perform a full sync. If you receive no nextPageToken in the response, there are no updates to retrieve and you can store the returned historyId for a future request.
:return: ListHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_history_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_history_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_history_list_with_http_info(self, user_id, **kwargs):
"""
Lists the history of all changes to the given mailbox. History results are returned in chronological order (increasing historyId).
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_history_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param list[str] history_types: History types to be returned by the function
:param str label_id: Only return messages with a label matching the ID.
:param int max_results: The maximum number of history records to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str start_history_id: Required. Returns history records after the specified startHistoryId. The supplied startHistoryId should be obtained from the historyId of a message, thread, or previous list response. History IDs increase chronologically but are not contiguous with random gaps in between valid IDs. Supplying an invalid or out of date startHistoryId typically returns an HTTP 404 error code. A historyId is typically valid for at least a week, but in some rare circumstances may be valid for only a few hours. If you receive an HTTP 404 error response, your application should perform a full sync. If you receive no nextPageToken in the response, there are no updates to retrieve and you can store the returned historyId for a future request.
:return: ListHistoryResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'history_types', 'label_id', 'max_results', 'page_token', 'start_history_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_history_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_history_list`")
collection_formats = {}
resource_path = '/{userId}/history'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'history_types' in params:
query_params['historyTypes'] = params['history_types']
collection_formats['historyTypes'] = 'multi'
if 'label_id' in params:
query_params['labelId'] = params['label_id']
if 'max_results' in params:
query_params['maxResults'] = params['max_results']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
if 'start_history_id' in params:
query_params['startHistoryId'] = params['start_history_id']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListHistoryResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_create(self, user_id, **kwargs):
"""
Creates a new label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_labels_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_labels_create_with_http_info(self, user_id, **kwargs):
"""
Creates a new label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_create`")
collection_formats = {}
resource_path = '/{userId}/labels'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified label and removes it from any messages and threads that it is applied to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_delete_with_http_info(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified label and removes it from any messages and threads that it is applied to.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_delete`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_get(self, user_id, id, **kwargs):
"""
Gets the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_get`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_list(self, user_id, **kwargs):
"""
Lists all labels in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListLabelsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_labels_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_labels_list_with_http_info(self, user_id, **kwargs):
"""
Lists all labels in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListLabelsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_list`")
collection_formats = {}
resource_path = '/{userId}/labels'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListLabelsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_patch(self, user_id, id, **kwargs):
"""
Updates the specified label. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_patch(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_patch_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_patch_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_patch_with_http_info(self, user_id, id, **kwargs):
"""
Updates the specified label. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_patch_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_patch`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_patch`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_labels_update(self, user_id, id, **kwargs):
"""
Updates the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_update(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_labels_update_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_labels_update_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_labels_update_with_http_info(self, user_id, id, **kwargs):
"""
Updates the specified label.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_labels_update_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the label to update. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Label body:
:return: Label
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_labels_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_labels_update`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_labels_update`")
collection_formats = {}
resource_path = '/{userId}/labels/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Label',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_attachments_get(self, user_id, message_id, id, **kwargs):
"""
Gets the specified message attachment.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_attachments_get(user_id, message_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str message_id: The ID of the message containing the attachment. (required)
:param str id: The ID of the attachment. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: MessagePartBody
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_attachments_get_with_http_info(user_id, message_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_attachments_get_with_http_info(user_id, message_id, id, **kwargs)
return data
def gmail_users_messages_attachments_get_with_http_info(self, user_id, message_id, id, **kwargs):
"""
Gets the specified message attachment.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_attachments_get_with_http_info(user_id, message_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str message_id: The ID of the message containing the attachment. (required)
:param str id: The ID of the attachment. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: MessagePartBody
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'message_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_attachments_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_attachments_get`")
# verify the required parameter 'message_id' is set
if ('message_id' not in params) or (params['message_id'] is None):
raise ValueError("Missing the required parameter `message_id` when calling `gmail_users_messages_attachments_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_attachments_get`")
collection_formats = {}
resource_path = '/{userId}/messages/{messageId}/attachments/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'message_id' in params:
path_params['messageId'] = params['message_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MessagePartBody',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_batch_delete(self, user_id, **kwargs):
"""
Deletes many messages by message ID. Provides no guarantees that messages were not already deleted or even existed at all.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_batch_delete(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param BatchDeleteMessagesRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_batch_delete_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_batch_delete_with_http_info(user_id, **kwargs)
return data
def gmail_users_messages_batch_delete_with_http_info(self, user_id, **kwargs):
"""
Deletes many messages by message ID. Provides no guarantees that messages were not already deleted or even existed at all.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_batch_delete_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param BatchDeleteMessagesRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_batch_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_batch_delete`")
collection_formats = {}
resource_path = '/{userId}/messages/batchDelete'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_batch_modify(self, user_id, **kwargs):
"""
Modifies the labels on the specified messages.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_batch_modify(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param BatchModifyMessagesRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_batch_modify_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_batch_modify_with_http_info(user_id, **kwargs)
return data
def gmail_users_messages_batch_modify_with_http_info(self, user_id, **kwargs):
"""
Modifies the labels on the specified messages.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_batch_modify_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param BatchModifyMessagesRequest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_batch_modify" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_batch_modify`")
collection_formats = {}
resource_path = '/{userId}/messages/batchModify'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified message. This operation cannot be undone. Prefer messages.trash instead.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_delete_with_http_info(user_id, id, **kwargs)
return data
    def gmail_users_messages_delete_with_http_info(self, user_id, id, **kwargs):
        """
        Immediately and permanently deletes the specified message. This operation cannot be undone. Prefer messages.trash instead.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_delete_with_http_info(user_id, id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str id: The ID of the message to delete. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts, plus the generic
        # framework control keywords appended below.
        all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() also captures self/all_params/kwargs here; only the
        # whitelisted keys are read below, so the extra entries are inert.
        # Do not introduce new locals above this line.
        params = locals()
        # Flatten kwargs into params, rejecting anything not whitelisted.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_delete`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_delete`")
        collection_formats = {}
        # The '{format}' replace is a codegen artifact; this path has no
        # '{format}' placeholder, so it is a no-op.
        resource_path = '/{userId}/messages/{id}'.replace('{format}', 'json')
        # Map snake_case Python names onto the API's camelCase placeholders.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # response_type=None: a successful DELETE returns no body.
        return self.api_client.call_api(resource_path, 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_get(self, user_id, id, **kwargs):
"""
Gets the specified message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the message in.
:param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_get_with_http_info(user_id, id, **kwargs)
return data
    def gmail_users_messages_get_with_http_info(self, user_id, id, **kwargs):
        """
        Gets the specified message.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_get_with_http_info(user_id, id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str id: The ID of the message to retrieve. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param str format: The format to return the message in.
        :param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
        :return: Message
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts, plus the generic
        # framework control keywords appended below.
        all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'format', 'metadata_headers']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() also captures self/all_params/kwargs here; only the
        # whitelisted keys are read below, so the extra entries are inert.
        # Do not introduce new locals above this line.
        params = locals()
        # Flatten kwargs into params, rejecting anything not whitelisted.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_get`")
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_get`")
        collection_formats = {}
        # The '{format}' replace is a codegen artifact; this path has no
        # '{format}' placeholder, so it is a no-op.
        resource_path = '/{userId}/messages/{id}'.replace('{format}', 'json')
        # Map snake_case Python names onto the API's camelCase placeholders.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        if 'format' in params:
            query_params['format'] = params['format']
        if 'metadata_headers' in params:
            query_params['metadataHeaders'] = params['metadata_headers']
            # 'multi': repeat the query parameter once per list element.
            collection_formats['metadataHeaders'] = 'multi'
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # The response body is deserialized into a Message model.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_import(self, user_id, **kwargs):
"""
Imports a message into only this user's mailbox, with standard email delivery scanning and classification similar to receiving via SMTP. Does not send a message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_import(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
:param str internal_date_source: Source for Gmail's internal date of the message.
:param bool never_mark_spam: Ignore the Gmail spam classifier decision and never mark this email as SPAM in the mailbox.
:param bool process_for_calendar: Process calendar invites in the email and add any extracted meetings to the Google Calendar for this user.
:param Message body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_import_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_import_with_http_info(user_id, **kwargs)
return data
    def gmail_users_messages_import_with_http_info(self, user_id, **kwargs):
        """
        Imports a message into only this user's mailbox, with standard email delivery scanning and classification similar to receiving via SMTP. Does not send a message.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_import_with_http_info(user_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
        :param str internal_date_source: Source for Gmail's internal date of the message.
        :param bool never_mark_spam: Ignore the Gmail spam classifier decision and never mark this email as SPAM in the mailbox.
        :param bool process_for_calendar: Process calendar invites in the email and add any extracted meetings to the Google Calendar for this user.
        :param Message body:
        :return: Message
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts, plus the generic
        # framework control keywords appended below.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'deleted', 'internal_date_source', 'never_mark_spam', 'process_for_calendar', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() also captures self/all_params/kwargs here; only the
        # whitelisted keys are read below, so the extra entries are inert.
        # Do not introduce new locals above this line.
        params = locals()
        # Flatten kwargs into params, rejecting anything not whitelisted.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_import" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_import`")
        collection_formats = {}
        # The '{format}' replace is a codegen artifact; this path has no
        # '{format}' placeholder, so it is a no-op.
        resource_path = '/{userId}/messages/import'.replace('{format}', 'json')
        # Map snake_case Python names onto the API's camelCase placeholders.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        if 'deleted' in params:
            query_params['deleted'] = params['deleted']
        if 'internal_date_source' in params:
            query_params['internalDateSource'] = params['internal_date_source']
        if 'never_mark_spam' in params:
            query_params['neverMarkSpam'] = params['never_mark_spam']
        if 'process_for_calendar' in params:
            query_params['processForCalendar'] = params['process_for_calendar']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        # The request body is a raw RFC 822 message, not JSON.
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['message/rfc822'])
        # Authentication setting
        auth_settings = ['Oauth2']
        # The response body is deserialized into a Message model.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_insert(self, user_id, **kwargs):
"""
Directly inserts a message into only this user's mailbox similar to IMAP APPEND, bypassing most scanning and classification. Does not send a message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_insert(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
:param str internal_date_source: Source for Gmail's internal date of the message.
:param Message body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_insert_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_insert_with_http_info(user_id, **kwargs)
return data
    def gmail_users_messages_insert_with_http_info(self, user_id, **kwargs):
        """
        Directly inserts a message into only this user's mailbox similar to IMAP APPEND, bypassing most scanning and classification. Does not send a message.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_insert_with_http_info(user_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param bool deleted: Mark the email as permanently deleted (not TRASH) and only visible in Google Vault to a Vault administrator. Only used for G Suite accounts.
        :param str internal_date_source: Source for Gmail's internal date of the message.
        :param Message body:
        :return: Message
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts, plus the generic
        # framework control keywords appended below.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'deleted', 'internal_date_source', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() also captures self/all_params/kwargs here; only the
        # whitelisted keys are read below, so the extra entries are inert.
        # Do not introduce new locals above this line.
        params = locals()
        # Flatten kwargs into params, rejecting anything not whitelisted.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_insert" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_insert`")
        collection_formats = {}
        # The '{format}' replace is a codegen artifact; this path has no
        # '{format}' placeholder, so it is a no-op.
        resource_path = '/{userId}/messages'.replace('{format}', 'json')
        # Map snake_case Python names onto the API's camelCase placeholders.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        if 'deleted' in params:
            query_params['deleted'] = params['deleted']
        if 'internal_date_source' in params:
            query_params['internalDateSource'] = params['internal_date_source']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Content-Type`
        # The request body is a raw RFC 822 message, not JSON.
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['message/rfc822'])
        # Authentication setting
        auth_settings = ['Oauth2']
        # The response body is deserialized into a Message model.
        return self.api_client.call_api(resource_path, 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='Message',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_list(self, user_id, **kwargs):
"""
Lists the messages in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include messages from SPAM and TRASH in the results.
:param list[str] label_ids: Only return messages with labels that match all of the specified label IDs.
:param int max_results: Maximum number of messages to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
:return: ListMessagesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_list_with_http_info(user_id, **kwargs)
return data
    def gmail_users_messages_list_with_http_info(self, user_id, **kwargs):
        """
        Lists the messages in the user's mailbox.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.gmail_users_messages_list_with_http_info(user_id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
        :param str alt: Data format for the response.
        :param str fields: Selector specifying which fields to include in a partial response.
        :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
        :param str oauth_token: OAuth 2.0 token for the current user.
        :param bool pretty_print: Returns response with indentations and line breaks.
        :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
        :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
        :param bool include_spam_trash: Include messages from SPAM and TRASH in the results.
        :param list[str] label_ids: Only return messages with labels that match all of the specified label IDs.
        :param int max_results: Maximum number of messages to return.
        :param str page_token: Page token to retrieve a specific page of results in the list.
        :param str q: Only return messages matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
        :return: ListMessagesResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments this endpoint accepts, plus the generic
        # framework control keywords appended below.
        all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'include_spam_trash', 'label_ids', 'max_results', 'page_token', 'q']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # NOTE: locals() also captures self/all_params/kwargs here; only the
        # whitelisted keys are read below, so the extra entries are inert.
        # Do not introduce new locals above this line.
        params = locals()
        # Flatten kwargs into params, rejecting anything not whitelisted.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method gmail_users_messages_list" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'user_id' is set
        if ('user_id' not in params) or (params['user_id'] is None):
            raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_list`")
        collection_formats = {}
        # The '{format}' replace is a codegen artifact; this path has no
        # '{format}' placeholder, so it is a no-op.
        resource_path = '/{userId}/messages'.replace('{format}', 'json')
        # Map snake_case Python names onto the API's camelCase placeholders.
        path_params = {}
        if 'user_id' in params:
            path_params['userId'] = params['user_id']
        query_params = {}
        if 'alt' in params:
            query_params['alt'] = params['alt']
        if 'fields' in params:
            query_params['fields'] = params['fields']
        if 'key' in params:
            query_params['key'] = params['key']
        if 'oauth_token' in params:
            query_params['oauth_token'] = params['oauth_token']
        if 'pretty_print' in params:
            query_params['prettyPrint'] = params['pretty_print']
        if 'quota_user' in params:
            query_params['quotaUser'] = params['quota_user']
        if 'user_ip' in params:
            query_params['userIp'] = params['user_ip']
        if 'include_spam_trash' in params:
            query_params['includeSpamTrash'] = params['include_spam_trash']
        if 'label_ids' in params:
            query_params['labelIds'] = params['label_ids']
            # 'multi': repeat the query parameter once per list element.
            collection_formats['labelIds'] = 'multi'
        if 'max_results' in params:
            query_params['maxResults'] = params['max_results']
        if 'page_token' in params:
            query_params['pageToken'] = params['page_token']
        if 'q' in params:
            query_params['q'] = params['q']
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # Authentication setting
        auth_settings = ['Oauth2']
        # The response body is deserialized into a ListMessagesResponse model.
        return self.api_client.call_api(resource_path, 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ListMessagesResponse',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def gmail_users_messages_modify(self, user_id, id, **kwargs):
"""
Modifies the labels on the specified message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_modify(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to modify. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ModifyMessageRequest body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_modify_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_modify_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_messages_modify_with_http_info(self, user_id, id, **kwargs):
"""
Modifies the labels on the specified message.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_modify_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to modify. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ModifyMessageRequest body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_modify" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_modify`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_modify`")
collection_formats = {}
resource_path = '/{userId}/messages/{id}/modify'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_send(self, user_id, **kwargs):
"""
Sends the specified message to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_send(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Message body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_send_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_messages_send_with_http_info(user_id, **kwargs)
return data
def gmail_users_messages_send_with_http_info(self, user_id, **kwargs):
"""
Sends the specified message to the recipients in the To, Cc, and Bcc headers.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_send_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Message body:
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_send" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_send`")
collection_formats = {}
resource_path = '/{userId}/messages/send'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['message/rfc822'])
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_trash(self, user_id, id, **kwargs):
"""
Moves the specified message to the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_trash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_trash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_trash_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_messages_trash_with_http_info(self, user_id, id, **kwargs):
"""
Moves the specified message to the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_trash_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_trash" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_trash`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_trash`")
collection_formats = {}
resource_path = '/{userId}/messages/{id}/trash'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_messages_untrash(self, user_id, id, **kwargs):
"""
Removes the specified message from the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_untrash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to remove from Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_messages_untrash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_messages_untrash_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_messages_untrash_with_http_info(self, user_id, id, **kwargs):
"""
Removes the specified message from the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_messages_untrash_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the message to remove from Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Message
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_messages_untrash" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_messages_untrash`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_messages_untrash`")
collection_formats = {}
resource_path = '/{userId}/messages/{id}/untrash'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Message',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_create(self, user_id, **kwargs):
"""
Creates a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Filter body:
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_filters_create_with_http_info(self, user_id, **kwargs):
"""
Creates a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param Filter body:
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_create`")
collection_formats = {}
resource_path = '/{userId}/settings/filters'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Filter',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_delete(self, user_id, id, **kwargs):
"""
Deletes a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_settings_filters_delete_with_http_info(self, user_id, id, **kwargs):
"""
Deletes a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_filters_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/filters/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_get(self, user_id, id, **kwargs):
"""
Gets a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be fetched. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_settings_filters_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets a filter.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str id: The ID of the filter to be fetched. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Filter
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_filters_get`")
collection_formats = {}
resource_path = '/{userId}/settings/filters/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Filter',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_filters_list(self, user_id, **kwargs):
"""
Lists the message filters of a Gmail user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListFiltersResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_filters_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_filters_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_filters_list_with_http_info(self, user_id, **kwargs):
"""
Lists the message filters of a Gmail user.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_filters_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListFiltersResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_filters_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_filters_list`")
collection_formats = {}
resource_path = '/{userId}/settings/filters'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListFiltersResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_create(self, user_id, **kwargs):
"""
Creates a forwarding address. If ownership verification is required, a message will be sent to the recipient and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ForwardingAddress body:
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_create_with_http_info(self, user_id, **kwargs):
"""
Creates a forwarding address. If ownership verification is required, a message will be sent to the recipient and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ForwardingAddress body:
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_create`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ForwardingAddress',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_delete(self, user_id, forwarding_email, **kwargs):
"""
Deletes the specified forwarding address and revokes any verification that may have been required. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_delete(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_delete_with_http_info(self, user_id, forwarding_email, **kwargs):
"""
Deletes the specified forwarding address and revokes any verification that may have been required. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_delete_with_http_info(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'forwarding_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_delete`")
# verify the required parameter 'forwarding_email' is set
if ('forwarding_email' not in params) or (params['forwarding_email'] is None):
raise ValueError("Missing the required parameter `forwarding_email` when calling `gmail_users_settings_forwarding_addresses_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses/{forwardingEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'forwarding_email' in params:
path_params['forwardingEmail'] = params['forwarding_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_get(self, user_id, forwarding_email, **kwargs):
"""
Gets the specified forwarding address.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_get(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_get_with_http_info(self, user_id, forwarding_email, **kwargs):
"""
Gets the specified forwarding address.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_get_with_http_info(user_id, forwarding_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str forwarding_email: The forwarding address to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ForwardingAddress
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'forwarding_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_get`")
# verify the required parameter 'forwarding_email' is set
if ('forwarding_email' not in params) or (params['forwarding_email'] is None):
raise ValueError("Missing the required parameter `forwarding_email` when calling `gmail_users_settings_forwarding_addresses_get`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses/{forwardingEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'forwarding_email' in params:
path_params['forwardingEmail'] = params['forwarding_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ForwardingAddress',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_forwarding_addresses_list(self, user_id, **kwargs):
"""
Lists the forwarding addresses for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListForwardingAddressesResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_forwarding_addresses_list_with_http_info(self, user_id, **kwargs):
"""
Lists the forwarding addresses for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_forwarding_addresses_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListForwardingAddressesResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_forwarding_addresses_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_forwarding_addresses_list`")
collection_formats = {}
resource_path = '/{userId}/settings/forwardingAddresses'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListForwardingAddressesResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_auto_forwarding(self, user_id, **kwargs):
"""
Gets the auto-forwarding setting for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_auto_forwarding(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_auto_forwarding_with_http_info(self, user_id, **kwargs):
"""
Gets the auto-forwarding setting for the specified account.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_auto_forwarding_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_auto_forwarding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_auto_forwarding`")
collection_formats = {}
resource_path = '/{userId}/settings/autoForwarding'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AutoForwarding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_imap(self, user_id, **kwargs):
"""
Gets IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_imap(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_imap_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_imap_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_imap_with_http_info(self, user_id, **kwargs):
"""
Gets IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_imap_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_imap" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_imap`")
collection_formats = {}
resource_path = '/{userId}/settings/imap'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImapSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_pop(self, user_id, **kwargs):
"""
Gets POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_pop(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_pop_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_pop_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_pop_with_http_info(self, user_id, **kwargs):
"""
Gets POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_pop_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_pop" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_pop`")
collection_formats = {}
resource_path = '/{userId}/settings/pop'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PopSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_get_vacation(self, user_id, **kwargs):
"""
Gets vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_vacation(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_get_vacation_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_get_vacation_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_get_vacation_with_http_info(self, user_id, **kwargs):
"""
Gets vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_get_vacation_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_get_vacation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_get_vacation`")
collection_formats = {}
resource_path = '/{userId}/settings/vacation'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VacationSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_create(self, user_id, **kwargs):
"""
Creates a custom \"from\" send-as alias. If an SMTP MSA is specified, Gmail will attempt to connect to the SMTP service to validate the configuration before creating the alias. If ownership verification is required for the alias, a message will be sent to the email address and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_create(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_create_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_create_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_send_as_create_with_http_info(self, user_id, **kwargs):
"""
Creates a custom \"from\" send-as alias. If an SMTP MSA is specified, Gmail will attempt to connect to the SMTP service to validate the configuration before creating the alias. If ownership verification is required for the alias, a message will be sent to the email address and the resource's verification status will be set to pending; otherwise, the resource will be created with verification status set to accepted. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_create_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_create" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_create`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_delete(self, user_id, send_as_email, **kwargs):
"""
Deletes the specified send-as alias. Revokes any verification that may have been required for using it. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_delete(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_delete_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Deletes the specified send-as alias. Revokes any verification that may have been required for using it. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_delete_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be deleted. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_delete`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_get(self, user_id, send_as_email, **kwargs):
"""
Gets the specified send-as alias. Fails with an HTTP 404 error if the specified address is not a member of the collection.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_get(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_get_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Gets the specified send-as alias. Fails with an HTTP 404 error if the specified address is not a member of the collection.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_get_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be retrieved. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_get`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_get`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_list(self, user_id, **kwargs):
"""
Lists the send-as aliases for the specified account. The result includes the primary send-as address associated with the account as well as any custom \"from\" aliases.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSendAsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_send_as_list_with_http_info(self, user_id, **kwargs):
"""
Lists the send-as aliases for the specified account. The result includes the primary send-as address associated with the account as well as any custom \"from\" aliases.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSendAsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_list`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListSendAsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_patch(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_patch(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_patch_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority. This method supports patch semantics.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_patch_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_patch`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_patch`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_delete(self, user_id, send_as_email, id, **kwargs):
"""
Deletes the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_delete(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_delete_with_http_info(self, user_id, send_as_email, id, **kwargs):
"""
Deletes the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_delete_with_http_info(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_delete`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_delete`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_get(self, user_id, send_as_email, id, **kwargs):
"""
Gets the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_get(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SmimeInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_get_with_http_info(self, user_id, send_as_email, id, **kwargs):
"""
Gets the specified S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_get_with_http_info(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: SmimeInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_get`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_get`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SmimeInfo',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_insert(self, user_id, send_as_email, **kwargs):
"""
Insert (upload) the given S/MIME config for the specified send-as alias. Note that pkcs12 format is required for the key.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_insert(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SmimeInfo body:
:return: SmimeInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_insert_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_insert_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_insert_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Insert (upload) the given S/MIME config for the specified send-as alias. Note that pkcs12 format is required for the key.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_insert_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SmimeInfo body:
:return: SmimeInfo
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_insert" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_insert`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_insert`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SmimeInfo',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_list(self, user_id, send_as_email, **kwargs):
"""
Lists S/MIME configs for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_list(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSmimeInfoResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_list_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_list_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_list_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Lists S/MIME configs for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_list_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: ListSmimeInfoResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_list`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_list`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListSmimeInfoResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_smime_info_set_default(self, user_id, send_as_email, id, **kwargs):
"""
Sets the default S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_set_default(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_smime_info_set_default_with_http_info(user_id, send_as_email, id, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_smime_info_set_default_with_http_info(user_id, send_as_email, id, **kwargs)
return data
def gmail_users_settings_send_as_smime_info_set_default_with_http_info(self, user_id, send_as_email, id, **kwargs):
"""
Sets the default S/MIME config for the specified send-as alias.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_smime_info_set_default_with_http_info(user_id, send_as_email, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str send_as_email: The email address that appears in the \"From:\" header for mail sent using this alias. (required)
:param str id: The immutable ID for the SmimeInfo. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_smime_info_set_default" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_smime_info_set_default`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_smime_info_set_default`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_settings_send_as_smime_info_set_default`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/smimeInfo/{id}/setDefault'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_update(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_update(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_update_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_update_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_update_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Updates a send-as alias. If a signature is provided, Gmail will sanitize the HTML before saving it with the alias. Addresses other than the primary address for the account can only be updated by service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_update_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be updated. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param SendAs body:
:return: SendAs
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_update`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_update`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SendAs',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_send_as_verify(self, user_id, send_as_email, **kwargs):
"""
Sends a verification email to the specified send-as alias address. The verification status must be pending. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_verify(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be verified. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_send_as_verify_with_http_info(user_id, send_as_email, **kwargs)
else:
(data) = self.gmail_users_settings_send_as_verify_with_http_info(user_id, send_as_email, **kwargs)
return data
def gmail_users_settings_send_as_verify_with_http_info(self, user_id, send_as_email, **kwargs):
"""
Sends a verification email to the specified send-as alias address. The verification status must be pending. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_send_as_verify_with_http_info(user_id, send_as_email, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str send_as_email: The send-as alias to be verified. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'send_as_email', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_send_as_verify" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_send_as_verify`")
# verify the required parameter 'send_as_email' is set
if ('send_as_email' not in params) or (params['send_as_email'] is None):
raise ValueError("Missing the required parameter `send_as_email` when calling `gmail_users_settings_send_as_verify`")
collection_formats = {}
resource_path = '/{userId}/settings/sendAs/{sendAsEmail}/verify'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'send_as_email' in params:
path_params['sendAsEmail'] = params['send_as_email']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_update_auto_forwarding(self, user_id, **kwargs):
"""
Updates the auto-forwarding setting for the specified account. A verified forwarding address must be specified when auto-forwarding is enabled. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_auto_forwarding(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param AutoForwarding body:
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_update_auto_forwarding_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_update_auto_forwarding_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_update_auto_forwarding_with_http_info(self, user_id, **kwargs):
"""
Updates the auto-forwarding setting for the specified account. A verified forwarding address must be specified when auto-forwarding is enabled. This method is only available to service account clients that have been delegated domain-wide authority.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_auto_forwarding_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param AutoForwarding body:
:return: AutoForwarding
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_update_auto_forwarding" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_auto_forwarding`")
collection_formats = {}
resource_path = '/{userId}/settings/autoForwarding'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AutoForwarding',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_update_imap(self, user_id, **kwargs):
"""
Updates IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_imap(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ImapSettings body:
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_update_imap_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_update_imap_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_update_imap_with_http_info(self, user_id, **kwargs):
"""
Updates IMAP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_imap_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ImapSettings body:
:return: ImapSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_update_imap" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_imap`")
collection_formats = {}
resource_path = '/{userId}/settings/imap'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ImapSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_update_pop(self, user_id, **kwargs):
"""
Updates POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_pop(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param PopSettings body:
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_update_pop_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_update_pop_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_update_pop_with_http_info(self, user_id, **kwargs):
"""
Updates POP settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_pop_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param PopSettings body:
:return: PopSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_update_pop" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_pop`")
collection_formats = {}
resource_path = '/{userId}/settings/pop'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='PopSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_settings_update_vacation(self, user_id, **kwargs):
"""
Updates vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_vacation(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param VacationSettings body:
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_settings_update_vacation_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_settings_update_vacation_with_http_info(user_id, **kwargs)
return data
def gmail_users_settings_update_vacation_with_http_info(self, user_id, **kwargs):
"""
Updates vacation responder settings.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_settings_update_vacation_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: User's email address. The special value \"me\" can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param VacationSettings body:
:return: VacationSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_settings_update_vacation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_settings_update_vacation`")
collection_formats = {}
resource_path = '/{userId}/settings/vacation'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VacationSettings',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_stop(self, user_id, **kwargs):
"""
Stop receiving push notifications for the given user mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_stop(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_stop_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_stop_with_http_info(user_id, **kwargs)
return data
def gmail_users_stop_with_http_info(self, user_id, **kwargs):
"""
Stop receiving push notifications for the given user mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_stop_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_stop" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_stop`")
collection_formats = {}
resource_path = '/{userId}/stop'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_delete(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified thread. This operation cannot be undone. Prefer threads.trash instead.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_delete(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: ID of the Thread to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_delete_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_threads_delete_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_threads_delete_with_http_info(self, user_id, id, **kwargs):
"""
Immediately and permanently deletes the specified thread. This operation cannot be undone. Prefer threads.trash instead.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_delete_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: ID of the Thread to delete. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_threads_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_delete`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_delete`")
collection_formats = {}
resource_path = '/{userId}/threads/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_get(self, user_id, id, **kwargs):
"""
Gets the specified thread.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_get(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the messages in.
:param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_get_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_threads_get_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_threads_get_with_http_info(self, user_id, id, **kwargs):
"""
Gets the specified thread.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_get_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to retrieve. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param str format: The format to return the messages in.
:param list[str] metadata_headers: When given and format is METADATA, only include headers specified.
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'format', 'metadata_headers']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_threads_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_get`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_get`")
collection_formats = {}
resource_path = '/{userId}/threads/{id}'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'format' in params:
query_params['format'] = params['format']
if 'metadata_headers' in params:
query_params['metadataHeaders'] = params['metadata_headers']
collection_formats['metadataHeaders'] = 'multi'
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Thread',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_list(self, user_id, **kwargs):
"""
Lists the threads in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_list(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include threads from SPAM and TRASH in the results.
:param list[str] label_ids: Only return threads with labels that match all of the specified label IDs.
:param int max_results: Maximum number of threads to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return threads matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
:return: ListThreadsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_list_with_http_info(user_id, **kwargs)
else:
(data) = self.gmail_users_threads_list_with_http_info(user_id, **kwargs)
return data
def gmail_users_threads_list_with_http_info(self, user_id, **kwargs):
"""
Lists the threads in the user's mailbox.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_list_with_http_info(user_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param bool include_spam_trash: Include threads from SPAM and TRASH in the results.
:param list[str] label_ids: Only return threads with labels that match all of the specified label IDs.
:param int max_results: Maximum number of threads to return.
:param str page_token: Page token to retrieve a specific page of results in the list.
:param str q: Only return threads matching the specified query. Supports the same query format as the Gmail search box. For example, \"from:someuser@example.com rfc822msgid: is:unread\". Parameter cannot be used when accessing the api using the gmail.metadata scope.
:return: ListThreadsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'include_spam_trash', 'label_ids', 'max_results', 'page_token', 'q']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_threads_list" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_list`")
collection_formats = {}
resource_path = '/{userId}/threads'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
if 'include_spam_trash' in params:
query_params['includeSpamTrash'] = params['include_spam_trash']
if 'label_ids' in params:
query_params['labelIds'] = params['label_ids']
collection_formats['labelIds'] = 'multi'
if 'max_results' in params:
query_params['maxResults'] = params['max_results']
if 'page_token' in params:
query_params['pageToken'] = params['page_token']
if 'q' in params:
query_params['q'] = params['q']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListThreadsResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_modify(self, user_id, id, **kwargs):
"""
Modifies the labels applied to the thread. This applies to all messages in the thread.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_modify(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to modify. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ModifyThreadRequest body:
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_modify_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_threads_modify_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_threads_modify_with_http_info(self, user_id, id, **kwargs):
"""
Modifies the labels applied to the thread. This applies to all messages in the thread.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_modify_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to modify. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:param ModifyThreadRequest body:
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_threads_modify" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_modify`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_modify`")
collection_formats = {}
resource_path = '/{userId}/threads/{id}/modify'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Thread',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_trash(self, user_id, id, **kwargs):
"""
Moves the specified thread to the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_trash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_trash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_threads_trash_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_threads_trash_with_http_info(self, user_id, id, **kwargs):
"""
Moves the specified thread to the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_trash_with_http_info(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method gmail_users_threads_trash" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'user_id' is set
if ('user_id' not in params) or (params['user_id'] is None):
raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_trash`")
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_trash`")
collection_formats = {}
resource_path = '/{userId}/threads/{id}/trash'.replace('{format}', 'json')
path_params = {}
if 'user_id' in params:
path_params['userId'] = params['user_id']
if 'id' in params:
path_params['id'] = params['id']
query_params = {}
if 'alt' in params:
query_params['alt'] = params['alt']
if 'fields' in params:
query_params['fields'] = params['fields']
if 'key' in params:
query_params['key'] = params['key']
if 'oauth_token' in params:
query_params['oauth_token'] = params['oauth_token']
if 'pretty_print' in params:
query_params['prettyPrint'] = params['pretty_print']
if 'quota_user' in params:
query_params['quotaUser'] = params['quota_user']
if 'user_ip' in params:
query_params['userIp'] = params['user_ip']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Oauth2']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Thread',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def gmail_users_threads_untrash(self, user_id, id, **kwargs):
"""
Removes the specified thread from the trash.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.gmail_users_threads_untrash(user_id, id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
:param str id: The ID of the thread to remove from Trash. (required)
:param str alt: Data format for the response.
:param str fields: Selector specifying which fields to include in a partial response.
:param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
:param str oauth_token: OAuth 2.0 token for the current user.
:param bool pretty_print: Returns response with indentations and line breaks.
:param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
:param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
:return: Thread
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.gmail_users_threads_untrash_with_http_info(user_id, id, **kwargs)
else:
(data) = self.gmail_users_threads_untrash_with_http_info(user_id, id, **kwargs)
return data
def gmail_users_threads_untrash_with_http_info(self, user_id, id, **kwargs):
    """
    Removes the specified thread from the trash.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_threads_untrash_with_http_info(user_id, id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str id: The ID of the thread to remove from Trash. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :return: Thread
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments: API parameters plus the
    # transport-control flags understood by ApiClient.call_api.
    all_params = ['user_id', 'id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots everything defined so far (user_id, id, kwargs,
    # all_params) into one dict; kwargs entries are validated and then
    # flattened into it.  Statement order matters here.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_threads_untrash" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_threads_untrash`")
    # verify the required parameter 'id' is set
    if ('id' not in params) or (params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `gmail_users_threads_untrash`")
    collection_formats = {}
    resource_path = '/{userId}/threads/{id}/untrash'.replace('{format}', 'json')
    # Path placeholders: snake_case params map to the API's camelCase names.
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    if 'id' in params:
        path_params['id'] = params['id']
    # Optional query-string parameters, forwarded only when supplied.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the actual HTTP POST (and optional async dispatch) to the
    # shared API client; the response is deserialized into a Thread.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Thread',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def gmail_users_watch(self, user_id, **kwargs):
    """
    Set up or update a push notification watch on the given user mailbox.

    Synchronous by default; supply a ``callback`` callable to run the
    request asynchronously and have the response delivered to it.

    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_watch(user_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Arbitrary per-user quota string (max 40 characters). Overrides userIp if both are provided.
    :param str user_ip: IP address of the originating site, for per-user limits.
    :param WatchRequest body:
    :return: WatchResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always wants just the response payload.
    kwargs['_return_http_data_only'] = True
    # Async (callback) and sync paths both delegate to the same worker
    # and return whatever it returns, so no branching is needed.
    return self.gmail_users_watch_with_http_info(user_id, **kwargs)
def gmail_users_watch_with_http_info(self, user_id, **kwargs):
    """
    Set up or update a push notification watch on the given user mailbox.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.gmail_users_watch_with_http_info(user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str user_id: The user's email address. The special value me can be used to indicate the authenticated user. (required)
    :param str alt: Data format for the response.
    :param str fields: Selector specifying which fields to include in a partial response.
    :param str key: API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
    :param str oauth_token: OAuth 2.0 token for the current user.
    :param bool pretty_print: Returns response with indentations and line breaks.
    :param str quota_user: Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters. Overrides userIp if both are provided.
    :param str user_ip: IP address of the site where the request originates. Use this if you want to enforce per-user limits.
    :param WatchRequest body:
    :return: WatchResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments: API parameters plus the
    # transport-control flags understood by ApiClient.call_api.
    all_params = ['user_id', 'alt', 'fields', 'key', 'oauth_token', 'pretty_print', 'quota_user', 'user_ip', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # locals() snapshots everything defined so far (user_id, kwargs,
    # all_params) into one dict; kwargs entries are validated and then
    # flattened into it.  Statement order matters here.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method gmail_users_watch" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `gmail_users_watch`")
    collection_formats = {}
    resource_path = '/{userId}/watch'.replace('{format}', 'json')
    # Path placeholders: snake_case params map to the API's camelCase names.
    path_params = {}
    if 'user_id' in params:
        path_params['userId'] = params['user_id']
    # Optional query-string parameters, forwarded only when supplied.
    query_params = {}
    if 'alt' in params:
        query_params['alt'] = params['alt']
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'key' in params:
        query_params['key'] = params['key']
    if 'oauth_token' in params:
        query_params['oauth_token'] = params['oauth_token']
    if 'pretty_print' in params:
        query_params['prettyPrint'] = params['pretty_print']
    if 'quota_user' in params:
        query_params['quotaUser'] = params['quota_user']
    if 'user_ip' in params:
        query_params['userIp'] = params['user_ip']
    header_params = {}
    form_params = []
    local_var_files = {}
    # The optional WatchRequest payload becomes the POST body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # Authentication setting
    auth_settings = ['Oauth2']
    # Delegate the actual HTTP POST (and optional async dispatch) to the
    # shared API client; the response is deserialized into a WatchResponse.
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='WatchResponse',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
|
CanopyIQ/gmail_client
|
gmail_client/apis/users_api.py
|
Python
|
mit
| 459,442
|
#!/usr/bin/env python
from __future__ import print_function
import boto3
import time
from botocore.exceptions import ClientError
from datetime import datetime
def get_unix_timestamp():
    """
    Return the current Unix timestamp (whole seconds) as a string.

    Equivalent to the previous datetime -> timetuple -> mktime round trip,
    but uses time.time() directly, avoiding the intermediate datetime
    object and the local-time struct conversion.
    """
    return str(int(time.time()))
def lambda_handler(event, context):
    """
    Create EBS AMIs for every instance matched by the event's filter.

    :param event: Lambda event dict; optional keys:
        'DryRun' (bool, default False) and 'Filters' (EC2 filter list,
        default: instances carrying the 'ops:snapshot' tag key).
    :param context: Lambda context object (unused).
    :raises botocore.exceptions.ClientError: on any EC2 failure other
        than the expected DryRunOperation response.
    """
    # Fill in defaults without clobbering caller-supplied values.
    event.setdefault('DryRun', False)
    event.setdefault('Filters', [{
        'Name': 'tag-key',
        'Values': ['ops:snapshot']
    }])
    ec2 = boto3.resource('ec2')
    # Iterate through instances identified by the filter.
    for instance in ec2.instances.filter(Filters=event['Filters']):
        instance_name = instance.instance_id
        instance_tags = []
        # Prefer a non-empty Name tag over the raw instance id; all other
        # tags (including an empty Name) are propagated to the AMI.
        for tag in instance.tags:
            if tag['Key'] == 'Name' and tag['Value'] != '':
                instance_name = tag['Value']
            else:
                instance_tags.append(tag)
        try:
            # Create the AMI without rebooting the instance.
            image_name = instance_name + '-' + get_unix_timestamp()
            image = instance.create_image(
                Name=image_name,
                NoReboot=True,
                DryRun=event['DryRun']
            )
            print('Started image creation: ' + image_name)
            # Tag the AMI with a default 30-day retention marker plus the
            # source instance's tags.
            image_tags = [{'Key': 'ops:retention', 'Value': '30'}] + instance_tags
            image.create_tags(
                Tags=image_tags,
                DryRun=event['DryRun']
            )
        except ClientError as e:
            # A DryRunOperation "error" just means the dry run would have
            # succeeded; any other ClientError is a real failure and must
            # surface (the original code silently swallowed them all).
            if e.response['Error']['Code'] != 'DryRunOperation':
                raise
|
meredith-digops/awsops
|
amicreation/amicreation.py
|
Python
|
mit
| 1,866
|
from allauth.socialaccount import providers
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth.provider import OAuthProvider
from allauth.socialaccount.models import SocialApp, SocialToken
import oauth2 as oauth
import urllib, urllib2, json
class TwitterAccount(ProviderAccount):
    """Wraps a Twitter SocialAccount and its stored OAuth credentials."""

    def get_screen_name(self):
        """Return the Twitter screen name from extra_data, or None."""
        return self.account.extra_data.get('screen_name')

    def get_profile_url(self):
        """Return the public twitter.com profile URL, or None if unknown."""
        ret = None
        screen_name = self.get_screen_name()
        if screen_name:
            ret = 'http://twitter.com/' + screen_name
        return ret

    def get_avatar_url(self):
        """Return the full-size avatar URL, or None if unknown."""
        ret = None
        profile_image_url = self.account.extra_data.get('profile_image_url')
        if profile_image_url:
            # Hmm, hack to get our hands on the large image. Not
            # really documented, but seems to work.
            ret = profile_image_url.replace('_normal', '')
        return ret

    def _oauth_client(self):
        """Build an OAuth client from this account's newest token, or None.

        Shared by has_valid_authentication() and request_url(), which
        previously duplicated this construction.
        """
        app = SocialApp.objects.get_current(self.account.get_provider().id)
        tokens = SocialToken.objects.filter(app=app, account=self.account).order_by('-id')
        if not tokens:
            return None
        token = tokens[0]
        consumer = oauth.Consumer(key=app.key, secret=app.secret)
        access_token = oauth.Token(key=token.token, secret=token.token_secret)
        return oauth.Client(consumer, access_token)

    def has_valid_authentication(self):
        """Return True if the stored token still verifies against Twitter."""
        client = self._oauth_client()
        if client is None:
            return False
        try:
            response, data = client.request('https://api.twitter.com/1.1/account/verify_credentials.json')
            return True
        except urllib2.HTTPError:
            return False

    def request_url(self, url, args=None, callback=None):
        """
        GET `url` with `args` as the query string, signed with this
        account's OAuth token.

        Returns the decoded JSON response, or None when the account has no
        stored token.  `callback`, if given, receives the full URL and the
        raw response body before JSON decoding.
        """
        # NOTE: the default used to be a shared mutable dict (args={});
        # use None as the sentinel instead to avoid that pitfall.
        if args is None:
            args = {}
        client = self._oauth_client()
        if client is None:
            return None
        full_url = '%s?%s' % (url, urllib.urlencode(args))
        response, data = client.request(full_url)
        if callback:
            callback(full_url, data)
        return json.loads(data)

    def __unicode__(self):
        screen_name = self.get_screen_name()
        return screen_name or super(TwitterAccount, self).__unicode__()
class TwitterProvider(OAuthProvider):
    """allauth OAuth1 provider definition for Twitter."""
    id = 'twitter'
    name = 'Twitter'
    # Package whose urls/views modules allauth loads for this provider.
    package = 'allauth.socialaccount.providers.twitter'
    account_class = TwitterAccount
# Register the provider with allauth's global registry at import time.
providers.registry.register(TwitterProvider)
|
rawjam/django-allauth
|
allauth/socialaccount/providers/twitter/provider.py
|
Python
|
mit
| 2,869
|
#!/usr/bin/env python
import json
import os
import consul
import sys
import time
from alertaclient.api import Client
# Consul connection settings, overridable via environment variables.
CONSUL_HOST = os.environ.get('CONSUL_HOST', '127.0.0.1')
CONSUL_PORT = int(os.environ.get('CONSUL_PORT', 8500))
client = consul.Consul(host=CONSUL_HOST, port=CONSUL_PORT, token=None, scheme='http', consistency='default', dc=None, verify=True)
# The alert payload arrives as JSON on stdin (consul-alerts handler style).
j = json.load(sys.stdin)
print("Request:")
print(j)
# client.kv.get returns (index, item); item['Value'] is the stored payload.
# NOTE(review): python-consul typically returns Value as bytes -- verify
# downstream consumers handle that.
# The API URL and key are mandatory: exit if the KV lookup fails.
try:
    url = client.kv.get('alerta/apiurl')[1]['Value']
except Exception:
    print("No URL defined, exiting")
    sys.exit(1)
try:
    key = client.kv.get('alerta/apikey')[1]['Value']
except Exception:
    print("No key defined, exiting")
    sys.exit(1)
# The remaining settings are optional; a missing key makes kv.get return
# (index, None), so the subscript raises TypeError and we fall back.
try:
    max_retries = int(client.kv.get('alerta/max_retries')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    max_retries = 3
try:
    sleep = int(client.kv.get('alerta/sleep')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    sleep = 2
try:
    timeout = int(client.kv.get('alerta/timeout')[1]['Value'])
except TypeError:
    print("No value defined, using default")
    timeout = 900
try:
    origin = client.kv.get('alerta/origin')[1]['Value']
except TypeError:
    print("No value defined, using default")
    origin = "consul"
try:
    alerttype = client.kv.get('alerta/alerttype')[1]['Value']
except TypeError:
    print("No value defined, using default")
    alerttype = "ConsulAlert"
api = Client(endpoint=url, key=key)
# Map Consul check states to Alerta severities.
SEVERITY_MAP = {
    'critical': 'critical',
    'warning': 'warning',
    'passing': 'ok',
}
def createalert( data ):
    """Send one Alerta alert for a Consul check result.

    `data` is one decoded stdin entry; the keys read here are 'Node',
    'CheckId', 'Status' and 'Output'.  Retries up to `max_retries` times
    (module global), sleeping `sleep` seconds between attempts.
    """
    # Per-node environment override, falling back to the global default
    # and finally to "Production".
    try:
        environment = client.kv.get('alerta/env/{0}'.format(data['Node']))[1]['Value']
    except Exception:
        try:
            environment = client.kv.get('alerta/defaultenv')[1]['Value']
        except Exception:
            environment = "Production"
    # for/else: the else runs only if no attempt ever hit `break`,
    # i.e. every retry failed.
    for _ in range(max_retries):
        try:
            print("Response:")
            response = api.send_alert(
                resource=data['Node'],
                event=data['CheckId'],
                value=data['Status'],
                # NOTE(review): on Python 3 this passes a dict_keys view,
                # not a list -- confirm the client serializes it.
                correlate=SEVERITY_MAP.keys(),
                environment=environment,
                service=[data['CheckId']],
                severity=SEVERITY_MAP[data['Status']],
                text=data['Output'],
                timeout=timeout,
                origin=origin,
                type=alerttype
            )
            print(response)
        except Exception as e:
            print("HTTP Error: {}".format(e))
            time.sleep(sleep)
            continue
        else:
            break
    else:
        print("api is down")
def main():
    """Send one alert per entry of the JSON payload read from stdin."""
    # The original indexed back into `j` via enumerate (item[0] -> j[i]);
    # iterating the decoded list directly is equivalent and clearer.
    for item in j:
        createalert(item)
if __name__ == "__main__":
main()
|
alerta/alerta-contrib
|
integrations/consul/consulalerta.py
|
Python
|
mit
| 2,770
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
class AccessKeyName(str, Enum):
    """Selectable access-key slots ("key1" / "key2")."""
    key1 = "key1"
    key2 = "key2"
class CheckNameReason(str, Enum):
    """Reason codes reported by a name-availability check."""
    unavailable = "Unavailable"
    invalid = "Invalid"
|
Azure/azure-sdk-for-python
|
sdk/powerbiembedded/azure-mgmt-powerbiembedded/azure/mgmt/powerbiembedded/models/power_bi_embedded_management_client_enums.py
|
Python
|
mit
| 660
|
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
from typing import List, Literal, Optional, TypedDict
from .user import User
from .snowflake import Snowflake
# The four presence states a user can report.
StatusType = Literal['idle', 'dnd', 'online', 'offline']
class PartialPresenceUpdate(TypedDict):
    """Presence payload: a user reference, guild, status and activities."""
    user: User
    guild_id: Snowflake
    status: StatusType
    activities: List[Activity]
    client_status: ClientStatus
class ClientStatus(TypedDict, total=False):
    """Per-platform status; every key is optional (total=False)."""
    desktop: StatusType
    mobile: StatusType
    web: StatusType
class ActivityTimestamps(TypedDict, total=False):
    """Optional start/end markers of an activity."""
    start: int
    end: int
class ActivityParty(TypedDict, total=False):
    """Party identity; `size` is a list of ints (presumably [current, max] -- confirm)."""
    id: str
    size: List[int]
class ActivityAssets(TypedDict, total=False):
    """Image keys and hover texts attached to an activity."""
    large_image: str
    large_text: str
    small_image: str
    small_text: str
class ActivitySecrets(TypedDict, total=False):
    """Join/spectate/match secret strings for an activity."""
    join: str
    spectate: str
    match: str
class _ActivityEmojiOptional(TypedDict, total=False):
    # Optional half of ActivityEmoji: custom-emoji id and animation flag.
    id: Snowflake
    animated: bool
class ActivityEmoji(_ActivityEmojiOptional):
    """Emoji attached to an activity; only `name` is required."""
    name: str
class ActivityButton(TypedDict):
    """Label/URL pair rendered as a button on an activity."""
    label: str
    url: str
class _SendableActivityOptional(TypedDict, total=False):
    # Optional half of SendableActivity.
    url: Optional[str]
# Numeric activity kinds; note 3 is absent from the literal.
ActivityType = Literal[0, 1, 2, 4, 5]
class SendableActivity(_SendableActivityOptional):
    """Minimal activity shape a client may send: name, type, optional url."""
    name: str
    type: ActivityType
class _BaseActivity(SendableActivity):
    # Received activities additionally carry a creation timestamp.
    created_at: int
class Activity(_BaseActivity, total=False):
    """Full received activity; every field declared below is optional."""
    state: Optional[str]
    details: Optional[str]
    timestamps: ActivityTimestamps
    assets: ActivityAssets
    party: ActivityParty
    application_id: Snowflake
    flags: int
    emoji: Optional[ActivityEmoji]
    secrets: ActivitySecrets
    session_id: Optional[str]
    instance: bool
    buttons: List[ActivityButton]
|
Rapptz/discord.py
|
discord/types/activity.py
|
Python
|
mit
| 2,827
|
from django.apps import AppConfig
class TelegramBotConfig(AppConfig):
    """Django application configuration for the 'frekenbot' app."""
    name = 'frekenbot'
|
frekenbok/frekenbok
|
frekenbot/apps.py
|
Python
|
mit
| 95
|
#
# Copyright: Copyright (c) MOSEK ApS, Denmark. All rights reserved.
#
# File: simple.py
#
# Purpose: Demonstrates a very simple example using MOSEK by
# reading a problem file, solving the problem and
# writing the solution to a file.
#
import mosek
import sys
def streamprinter(msg):
sys.stdout.write (msg)
sys.stdout.flush ()
# Command-line entry: simple inputfile [ solutionfile ]
if len(sys.argv) <= 1:
    print ("Missing argument, syntax is:")
    print (" simple inputfile [ solutionfile ]")
else:
    # Create the mosek environment.
    env = mosek.Env ()
    # Create a task object linked with the environment env.
    # We create it with 0 variables and 0 constraints initially,
    # since we do not know the size of the problem.
    task = env.Task (0, 0)
    # Route solver log output through streamprinter (stdout).
    task.set_Stream (mosek.streamtype.log, streamprinter)
    # We assume that a problem file was given as the first command
    # line argument (received in `argv')
    task.readdata (sys.argv[1])
    # Solve the problem
    task.optimize ()
    # Print a summary of the solution
    task.solutionsummary (mosek.streamtype.log)
    # If an output file was specified, write a solution
    if len(sys.argv) >= 3:
        # We define the output format to be OPF, and tell MOSEK to
        # leave out parameters and problem data from the output file.
        task.putintparam (mosek.iparam.write_data_format, mosek.dataformat.op)
        task.putintparam (mosek.iparam.opf_write_solutions, mosek.onoffkey.on)
        task.putintparam (mosek.iparam.opf_write_hints, mosek.onoffkey.off)
        task.putintparam (mosek.iparam.opf_write_parameters, mosek.onoffkey.off)
        task.putintparam (mosek.iparam.opf_write_problem, mosek.onoffkey.off)
        task.writedata (sys.argv[2])
|
edljk/Mosek.jl
|
deps/src/mosek/7/tools/examples/python/simple.py
|
Python
|
mit
| 1,730
|
import cv2
import time
import numpy as np
# Upper bound on the number of simultaneously tracked targets.
MAX_TARGETS = 3
# Index aliases into a bbox tuple ((x0, y0), (x1, y1)): TOP/LEFT select
# the first corner, BOTTOM/RIGHT the second.
TOP = 0
BOTTOM = 1
LEFT = 0
RIGHT = 1
def __init_tracker(frame):
    """Build the initial tracker state tuple from the first frame.

    Returned tuple mirrors track_object's return: (center_point,
    running_average_image, running_average_in_display_color_depth,
    difference, last_target_count, last_target_change_time,
    last_frame_entity_list).
    """
    # The running average accumulates in float32; the display-depth
    # average and the difference buffer start as copies of the frame.
    avg = np.float32(frame)
    avg_display = frame.copy()
    diff = frame.copy()
    return (None,      # no center point yet
            avg,
            avg_display,
            diff,
            1,         # last_target_count
            0.0,       # last_target_change_time
            [])        # last_frame_entity_list
def __merge_collided_bboxes(bbox_list):
    """Repeatedly merge overlapping bboxes until none collide.

    Mutates and returns `bbox_list`.  On every merge the function restarts
    itself recursively, so worst case is quadratic in the list length --
    acceptable for the handful of boxes a frame produces.
    """
    # For every bbox...
    for this_bbox in bbox_list:
        # Collision detect every other bbox:
        for other_bbox in bbox_list:
            if this_bbox is other_bbox:
                continue  # Skip self
            # Assume a collision to start out with:
            has_collision = True
            # These coords are in screen coords, so > means
            # "lower than" and "further right than". And <
            # means "higher than" and "further left than".
            # We also inflate the box size by 10% to deal with
            # fuzziness in the data. (Without this, there are many times a bbox
            # is short of overlap by just one or two pixels.)
            # NOTE(review): the 10% factor scales the absolute coordinate,
            # not the box dimension, so the slack grows with distance from
            # the origin -- verify this is intended.
            if (this_bbox[BOTTOM][0] * 1.1 < other_bbox[TOP][0] * 0.9):
                has_collision = False
            if (this_bbox[TOP][0] * .9 > other_bbox[BOTTOM][0] * 1.1):
                has_collision = False
            if (this_bbox[RIGHT][1] * 1.1 < other_bbox[LEFT][1] * 0.9):
                has_collision = False
            if (this_bbox[LEFT][1] * 0.9 > other_bbox[RIGHT][1] * 1.1):
                has_collision = False
            if has_collision:
                # merge these two bboxes into one, then start over:
                top_left_x = min(this_bbox[LEFT][0], other_bbox[LEFT][0])
                top_left_y = min(this_bbox[LEFT][1], other_bbox[LEFT][1])
                bottom_right_x = max(this_bbox[RIGHT][0], other_bbox[RIGHT][0])
                bottom_right_y = max(this_bbox[RIGHT][1], other_bbox[RIGHT][1])
                new_bbox = ((top_left_x, top_left_y),
                            (bottom_right_x, bottom_right_y))
                bbox_list.remove(this_bbox)
                bbox_list.remove(other_bbox)
                bbox_list.append(new_bbox)
                # Start over with the new list:
                return __merge_collided_bboxes(bbox_list)
    # When there are no collions between boxes, return that list:
    return bbox_list
def __display_tracked_objects(frame):
    """Show the annotated frame in the 'display' window; the 1 ms waitKey
    pumps the HighGUI event loop so the window actually refreshes."""
    cv2.imshow('display', frame)
    cv2.waitKey(1)
def track_object(frame,
                 running_average_image,
                 running_average_in_display_color_depth,
                 difference,
                 last_target_count,
                 last_target_change_time,
                 last_frame_entity_list):
    """Detect and track moving objects in `frame` via background subtraction.

    State is threaded through the parameters and returned updated as a
    7-tuple (center_point first) so the caller can feed it back on the
    next frame.  Passing difference=None (re)initializes the tracker.

    NOTE(review): this function references sqrt, random and hashlib,
    none of which are imported in this module's visible scope -- verify.
    NOTE(review): several constructs (zip() result reused after list(),
    dict.keys() followed by .sort(), a zip passed to cv2.kmeans) only
    work on Python 2 -- verify the target interpreter.
    """
    if difference is None:
        return __init_tracker(frame)
    frame_time = time.time()
    # Create a working "color image" to modify / blur
    color_image = frame.copy()
    # Smooth to get rid of false positives
    color_image = cv2.GaussianBlur(color_image, (19, 19), 0)
    # Use the Running Average as the static background
    # a = 0.020 leaves artifacts lingering way too long.
    # a = 0.320 works well at 320x240, 15fps. (1/a is roughly num frames.)
    cv2.accumulateWeighted(color_image, running_average_image, 0.320, None)
    # Convert the scale of the moving average.
    running_average_in_display_color_depth = cv2.convertScaleAbs(
        running_average_image)
    # Subtract the current frame from the moving average.
    cv2.absdiff(color_image, running_average_in_display_color_depth, difference)
    # Convert the image to greyscale.
    grey_image = cv2.cvtColor(difference, cv2.COLOR_BGR2GRAY)
    # Threshold the image to a black and white motion mask:
    _, grey_image = cv2.threshold(grey_image, 2, 255, cv2.THRESH_BINARY)
    # Smooth and threshold again to eliminate "sparkles"
    grey_image = cv2.GaussianBlur(grey_image, (19, 19), 0)
    _, grey_image = cv2.threshold(grey_image, 240, 255, cv2.THRESH_BINARY)
    non_black_coords_array = np.where(grey_image > 3)
    # Convert from numpy.where()'s two separate lists to one list of (x, y) tuples:
    non_black_coords_array = zip(
        non_black_coords_array[1], non_black_coords_array[0])
    # Was using this to hold either pixel coords or polygon coords.
    points = []
    bounding_box_list = []
    # Now calculate movements using the white pixels as "motion" data
    _, contour, hierarchy = cv2.findContours(
        grey_image, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
    for cnt in contour:
        [x, y, w, h] = cv2.boundingRect(cnt)
        point1 = (x, y)
        point2 = (x + w, y + h)
        bounding_box_list.append((point1, point2))
        epsilon = 0.01 * cv2.arcLength(cnt, True)
        polygon_points = cv2.approxPolyDP(cnt, epsilon, True)
        # To track polygon points only (instead of every pixel):
    # Find the average size of the bbox (targets), then
    # remove any tiny bboxes (which are prolly just noise).
    # "Tiny" is defined as any box with 1/10th the area of the average box.
    # This reduces false positives on tiny "sparkles" noise.
    box_areas = []
    for box in bounding_box_list:
        box_width = box[RIGHT][0] - box[LEFT][0]
        box_height = box[BOTTOM][0] - box[TOP][0]
        box_areas.append(box_width * box_height)
    average_box_area = 0.0
    if len(box_areas):
        average_box_area = float(sum(box_areas)) / len(box_areas)
    trimmed_box_list = []
    for box in bounding_box_list:
        box_width = box[RIGHT][0] - box[LEFT][0]
        box_height = box[BOTTOM][0] - box[TOP][0]
        # Only keep the box if it's not a tiny noise box:
        if (box_width * box_height) > average_box_area * 0.1:
            trimmed_box_list.append(box)
    bounding_box_list = __merge_collided_bboxes(trimmed_box_list)
    # Draw the merged box list:
    for box in bounding_box_list:
        cv2.rectangle(frame, box[0], box[1], (0, 255, 0), 1)
    # Here are our estimate points to track, based on merged & trimmed boxes:
    estimated_target_count = len(bounding_box_list)
    # Don't allow target "jumps" from few to many or many to few.
    # Only change the number of targets up to one target per n seconds.
    # This fixes the "exploding number of targets" when something stops moving
    # and the motion erodes to disparate little puddles all over the place.
    # 1 change per 0.35 secs
    if (frame_time - last_target_change_time) < .350:
        estimated_target_count = last_target_count
    else:
        if (last_target_count - estimated_target_count) > 1:
            estimated_target_count = last_target_count - 1
        if (estimated_target_count - last_target_count) > 1:
            estimated_target_count = last_target_count + 1
        last_target_change_time = frame_time
    # Clip to the user-supplied maximum:
    estimated_target_count = min(estimated_target_count, MAX_TARGETS)
    # The estimated_target_count at this point is the maximum number of targets
    # we want to look for. If kmeans decides that one of our candidate
    # bboxes is not actually a target, we remove it from the target list below.
    # Using the numpy values directly (treating all pixels as points):
    points = non_black_coords_array
    center_points = []
    list(points)
    if len(list(points)):
        # If we have all the "target_count" targets from last frame,
        # use the previously known targets (for greater accuracy).
        criteria = (cv2.TERM_CRITERIA_EPS +
                    cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
        flags = cv2.KMEANS_RANDOM_CENTERS
        # Cluster the motion pixels; the cluster centers become candidate
        # target locations.
        compactness, labels, centers = cv2.kmeans(
            points, 2, None, criteria, 10, flags)
        for center_point in centers:
            center_points.append(center_point)
    # Now we have targets that are NOT computed from bboxes -- just
    # movement weights (according to kmeans). If any two targets are
    # within the same "bbox count", average them into a single target.
    # (Any kmeans targets not within a bbox are also kept.)
    trimmed_center_points = []
    removed_center_points = []
    for box in bounding_box_list:
        # Find the centers within this box:
        center_points_in_box = []
        for center_point in center_points:
            if center_point[0] < box[RIGHT][0] and \
               center_point[0] > box[LEFT][0] and \
               center_point[1] < box[BOTTOM][1] and \
               center_point[1] > box[TOP][1]:
                # This point is within the box.
                center_points_in_box.append(center_point)
        # Now see if there are more than one. If so, merge them.
        if len(center_points_in_box) > 1:
            # Merge them:
            x_list = []
            y_list = []
            for point in center_points_in_box:
                x_list.append(point[0])
                y_list.append(point[1])
            average_x = int(float(sum(x_list)) / len(x_list))
            average_y = int(float(sum(y_list)) / len(y_list))
            trimmed_center_points.append((average_x, average_y))
            # Record that they were removed:
            removed_center_points += center_points_in_box
        if len(center_points_in_box) == 1:
            # Just use it.
            trimmed_center_points.append(center_points_in_box[0])
    # If there are any center_points not within a bbox, just use them.
    # (It's probably a cluster comprised of a bunch of small bboxes.)
    for center_point in center_points:
        if (not center_point in trimmed_center_points) and \
           (not center_point in removed_center_points):
            trimmed_center_points.append(center_point)
    # Determine if there are any new (or lost) targets:
    actual_target_count = len(trimmed_center_points)
    last_target_count = actual_target_count
    # Now build the list of physical entities (objects)
    this_frame_entity_list = []
    # An entity is list: [name, color, last_time_seen, last_known_coords]
    for target in trimmed_center_points:
        # Is this a target near a prior entity (same physical entity)?
        entity_found = False
        entity_distance_dict = {}
        for entity in last_frame_entity_list:
            entity_coords = entity[3]
            delta_x = entity_coords[0] - target[0]
            delta_y = entity_coords[1] - target[1]
            distance = sqrt(pow(delta_x, 2) + pow(delta_y, 2))
            entity_distance_dict[distance] = entity
        # Did we find any non-claimed entities (nearest to furthest):
        distance_list = entity_distance_dict.keys()
        distance_list.sort()
        for distance in distance_list:
            # Yes; see if we can claim the nearest one:
            nearest_possible_entity = entity_distance_dict[distance]
            # Don't consider entities that are already claimed:
            if nearest_possible_entity in this_frame_entity_list:
                continue
            # Found the nearest entity to claim:
            entity_found = True
            # Update last_time_seen
            nearest_possible_entity[2] = frame_time
            # Update the new location
            nearest_possible_entity[3] = target
            this_frame_entity_list.append(nearest_possible_entity)
            break
        if not entity_found:
            # It's a new entity.
            color = (random.randint(0, 255), random.randint(
                0, 255), random.randint(0, 255))
            name = hashlib.md5(str(frame_time) + str(color)).hexdigest()[:6]
            last_time_seen = frame_time
            new_entity = [name, color, last_time_seen, target]
            this_frame_entity_list.append(new_entity)
    # Now "delete" any not-found entities which have expired:
    entity_ttl = 1.0  # 1 sec
    for entity in last_frame_entity_list:
        last_time_seen = entity[2]
        if frame_time - last_time_seen > entity_ttl:
            # It's gone.
            pass
        else:
            # Save it for next time... not expired yet:
            this_frame_entity_list.append(entity)
    # For next frame:
    last_frame_entity_list = this_frame_entity_list
    # Pick the center of any sufficiently large (>50x50) merged box and
    # mark it with concentric circles.
    center_point = None
    for entity in bounding_box_list:
        if (int(entity[1][0] - entity[0][0]) > 50) & (int(entity[1][1] - entity[0][1]) > 50):
            center_point = int(
                (entity[0][0] + entity[1][0]) / 2), int((entity[0][1] + entity[1][1]) / 2)
            cv2.circle(frame, center_point, 20, (0, 0, 255), 1)
            cv2.circle(frame, center_point, 10, (255, 0, 0), 2)
    __display_tracked_objects(frame)
    return center_point, \
        running_average_image, \
        running_average_in_display_color_depth, \
        difference, \
        last_target_count, \
        last_target_change_time, \
        last_frame_entity_list
|
walkover/auto-tracking-cctv-gateway
|
gateway/camera/tracker.py
|
Python
|
mit
| 13,148
|
from django.contrib import admin
from tsj.models import *
# Expose every tsj model in the Django admin with default ModelAdmin options.
admin.site.register(Company)
admin.site.register(Resident)
admin.site.register(House)
admin.site.register(ServiceCompany)
admin.site.register(MeterType)
admin.site.register(MeterReadingHistory)
admin.site.register(Employer)
admin.site.register(Notification)
|
dan4ik95dv/housemanagement
|
tsj/admin.py
|
Python
|
mit
| 317
|
from __future__ import print_function
import aaf
import aaf.mob
import aaf.define
import aaf.iterator
import aaf.dictionary
import aaf.storage
import aaf.component
import aaf.util
from aaf.util import AUID, MobID
import unittest
import os
# Directory containing this test module.
cur_dir = os.path.dirname(os.path.abspath(__file__))
# Scratch directory for files created by the tests; created on first run.
sandbox = os.path.join(cur_dir,'sandbox')
if not os.path.exists(sandbox):
    os.makedirs(sandbox)
class TestCommentMaker(unittest.TestCase):
    """Registers Avid-style comment-marker properties on the CommentMarker
    class and verifies that values written to them round-trip intact."""
    def test_add_comment_marker_props(self):
        """Register custom optional properties (time/date/user/color/attribute
        list) on CommentMarker, populate a DescriptiveMarker, and read back."""
        # NOTE(review): in-memory file; never saved, so nothing hits disk.
        f = aaf.open()
        # add RGBColor TypeDef
        rgb_id = AUID("urn:uuid:e96e6d43-c383-11d3-a069-006094eb75cb")
        rgb_typedef_pairs = [('red', 'UInt16'),
                             ('green', 'UInt16'),
                             ("blue", 'UInt16'),
                             ]
        rgb_typdef = aaf.define.TypeDefRecord(f, rgb_typedef_pairs, rgb_id, 'RGBColor')
        f.dictionary.register_def(rgb_typdef)
        cm_classdef = f.dictionary.lookup_classdef("CommentMarker")
        string_typedef = f.dictionary.lookup_typedef("string")
        # add CommentMarkerTime property
        cm_time_id = AUID("urn:uuid:c4c45d9c-0967-11d4-a08a-006094eb75cb")
        cm_prop_name = "CommentMarkerTime"
        cm_classdef.register_optional_propertydef(string_typedef, cm_time_id, cm_prop_name)
        # add CommentMarkerDate property
        cm_date_id = AUID("urn:uuid:c4c45d9b-0967-11d4-a08a-006094eb75cb")
        cm_prop_name = "CommentMarkerDate"
        cm_classdef.register_optional_propertydef(string_typedef, cm_date_id, cm_prop_name)
        # add CommentMarkerUSer property
        # NOTE(review): "USer" casing is deliberate here -- it must match the
        # property name used in the assertions below.
        cm_user_id = AUID("urn:uuid:c4c45d9a-0967-11d4-a08a-006094eb75cb")
        cm_prop_name = "CommentMarkerUSer"
        cm_classdef.register_optional_propertydef(string_typedef, cm_user_id, cm_prop_name)
        # add CommentMarkerColor property
        cm_color_id = AUID("urn:uuid:e96e6d44-c383-11d3-a069-006094eb75cb")
        cm_prop_name = "CommentMarkerColor"
        cm_classdef.register_optional_propertydef(rgb_typdef, cm_color_id, cm_prop_name)
        # add CommentMarkerAttributeList property
        cm_attr_list_id = AUID("urn:uuid:c72cc817-aac5-499b-af34-bc47fec1eaa8")
        strongref = f.dictionary.lookup_typedef("TaggedValueStrongReferenceVector")
        cm_prop_name = "CommentMarkerAttributeList"
        cm_classdef.register_optional_propertydef(strongref, cm_attr_list_id, cm_prop_name)
        # Populate a marker with values for every newly registered property.
        marker = f.create.DescriptiveMarker()
        marker['CommentMarkerTime'].value = "22:40"
        marker['CommentMarkerDate'].value = "06/18/2016"
        marker['CommentMarkerUSer'].value = "USERNAME"
        marker_colors = {"red":41471, "green":12134, "blue":6564}
        marker["CommentMarkerColor"].value = marker_colors
        int32_typedef = f.dictionary.lookup_typedef("Int32")
        crm_id = "060a2b340101010101010f0013-000000-5766066e2cd404e5-060e2b347f7f-2a80"
        crm_com = "This is the first marker text"
        # (name, value, typedef) triples for the tagged-value attribute list.
        attr_data = [('_ATN_CRM_LONG_CREATE_DATE', 1466304031, int32_typedef ),
                     ('_ATN_CRM_USER', "USERNAME", string_typedef),
                     ('_ATN_CRM_DATE', "06/18/2016", string_typedef),
                     ('_ATN_CRM_TIME', "22:40", string_typedef),
                     ('_ATN_CRM_COLOR', "Red", string_typedef),
                     ('_ATN_CRM_COM', crm_com, string_typedef),
                     ('_ATN_CRM_LONG_MOD_DATE', 1466304042, int32_typedef ),
                     ('_ATN_CRM_ID', crm_id, string_typedef),
                     ]
        tagged_values = [f.create.TaggedValue(name, value, typedef) for name, value,typedef in attr_data]
        marker["CommentMarkerAttributeList"].value = tagged_values
        # Verify everything reads back exactly as written.
        for i,tag in enumerate(marker["CommentMarkerAttributeList"].value):
            self.assertEqual(tag.name, attr_data[i][0])
            self.assertEqual(tag.value, attr_data[i][1])
            self.assertEqual(tag.typedef(), attr_data[i][2])
        self.assertEqual(marker["CommentMarkerColor"].value, marker_colors)
        self.assertEqual(marker['CommentMarkerUSer'].value, "USERNAME")
# Allow running this test module directly with `python <file>`.
if __name__ == "__main__":
    unittest.main()
|
markreidvfx/pyaaf
|
tests/test_CommentMarker.py
|
Python
|
mit
| 4,264
|
import hashlib
from datetime import datetime
from flask import request
from flask_login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from . import db, login_manager
#pylint: disable-msg=E1101
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: look up a User by its primary key."""
    return User.query.get(int(user_id))
#pylint: disable-msg=E1101
class User(UserMixin, db.Model):
    """Application account.  Passwords are stored only as salted hashes;
    UserMixin supplies the Flask-Login session integration."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String(64),
                      nullable=False,
                      unique=True,
                      index=True)
    username = db.Column(db.String(64),
                         nullable=False,
                         unique=True,
                         index=True)
    is_admin = db.Column(db.Boolean)
    name = db.Column(db.String(64))
    location = db.Column(db.String(64))
    bio = db.Column(db.Text)
    password_hash = db.Column(db.String(128))
    avatar_hash = db.Column(db.String(32))
    member_since = db.Column(db.DateTime(), default = datetime.utcnow)
    fb_token = db.Column(db.Text)
    posts = db.relationship('Post', backref='author', lazy='dynamic')
    def __init__(self, **kwargs):
        super(User, self).__init__(**kwargs)
        # Cache the MD5 of the e-mail once, for Gravatar URLs (see gravatar()).
        if self.email is not None and self.avatar_hash is None:
            self.avatar_hash = hashlib.md5(self.email.encode('utf-8')).hexdigest()
    #pylint: disable-msg=R0201
    @property
    def password(self):
        """Write-only attribute: the plaintext password can never be read."""
        raise AttributeError('Password is not a readable attribute')
    @password.setter
    def password(self, password):
        # Store a salted hash, never the plaintext.
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def gravatar(self, size=100, default='identicon', rating='g'):
        """Build a Gravatar URL for this user; the scheme (http/https)
        follows the current request."""
        if request.is_secure:
            url = 'https://secure.gravatar.com/avatar'
        else:
            url = 'http://www.gravatar.com/avatar'
        # Fall back to hashing the e-mail if the cached hash is missing.
        h = self.avatar_hash or \
            hashlib.md5(self.email.encode('utf-8')).hexdigest()
        return '{u}/{h}?s={s}&d={d}&r={r}'.format(u=url,
                                                  h=h,
                                                  s=size,
                                                  d=default,
                                                  r=rating)
class Post(db.Model):
    """News/blog post.  Full-text searchable (whoosh) on title and rendered
    HTML body; posts carry a language code (default Icelandic, 'is')."""
    __tablename__ = 'posts'
    __searchable__ = ['title', 'body_html']
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    title = db.Column(db.String(64))
    body = db.Column(db.Text)
    body_html = db.Column(db.Text)
    language = db.Column(db.String(4), default='is')
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
    category_id = db.Column(db.Integer, db.ForeignKey('categories.id'))
    def __init__(self, **kwargs):
        super(Post, self).__init__(**kwargs)
    @classmethod
    def get_all(cls, descending=True):
        """Return every post ordered by timestamp (newest first by default)."""
        if descending:
            return cls.query.order_by(cls.timestamp.desc()).all()
        else:
            return cls.query.order_by(cls.timestamp).all()
    @classmethod
    def get_all_by_lang(cls, descending=True, lang='is'):
        """Return every post in language *lang*, ordered by timestamp."""
        if descending:
            return cls.query.order_by(cls.timestamp.desc())\
                   .filter_by(language=lang)\
                   .all()
        else:
            return cls.query.order_by(cls.timestamp)\
                   .filter_by(language=lang)\
                   .all()
    @classmethod
    def get_per_page(cls, page, per_page=5, descending=True, lang='is'):
        """Return one pagination page of posts in language *lang*."""
        if descending:
            return cls.query.order_by(cls.timestamp.desc())\
                   .filter_by(language=lang)\
                   .paginate(page, per_page, False)
        else:
            return cls.query.order_by(cls.timestamp)\
                   .filter_by(language=lang)\
                   .paginate(page, per_page, False)
    @classmethod
    def get_by_id(cls, aid):
        """Return the post with primary key *aid* or abort with 404."""
        return cls.query.filter_by(id=aid).first_or_404()
    #pylint: disable-msg=R0913
    @classmethod
    def get_by_category(cls, cid, page, per_page=5, descending=True, lang='is'):
        """Return one pagination page of posts in category *cid*, language *lang*."""
        if descending:
            return cls.query.filter(cls.category_id == cid)\
                   .filter_by(language=lang)\
                   .order_by(cls.timestamp.desc())\
                   .paginate(page, per_page, False)
        else:
            return cls.query.filter(cls.category_id == cid)\
                   .filter_by(language=lang)\
                   .order_by(cls.timestamp)\
                   .paginate(page, per_page, False)
    @classmethod
    def search(cls, query, page, per_page=4, descending=True):
        """Whoosh full-text search; returns one pagination page of matches."""
        if descending:
            return cls.query.whoosh_search(query)\
                   .order_by(cls.timestamp.desc())\
                   .paginate(page, per_page, False)
        else:
            return cls.query.whoosh_search(query)\
                   .order_by(cls.timestamp)\
                   .paginate(page, per_page, False)
class Category(db.Model):
    """Post category with Icelandic and English names.  The built-in
    'Almenn frétt' (general news) category is excluded from listings."""
    __tablename__ = 'categories'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(64), nullable=False, unique=True)
    name_en = db.Column(db.String(64), nullable=False, unique=True)
    active = db.Column(db.Boolean, nullable=False, default=False)
    posts = db.relationship('Post', backref='category', lazy='dynamic')
    def __init__(self, **kwargs):
        super(Category, self).__init__(**kwargs)
    @classmethod
    def get_all_active(cls, active=True):
        """Return categories filtered by *active*, excluding 'Almenn frétt'."""
        if active:
            return cls.query.filter_by(active=True)\
                   .filter(cls.name != 'Almenn frétt').all()
        else:
            return cls.query.filter_by(active=False)\
                   .filter(cls.name != 'Almenn frétt').all()
    @classmethod
    def get_by_name(cls, name):
        """Return the category named *name*, or None if absent."""
        return cls.query.filter_by(name=name).first()
class Image(db.Model):
    """Uploaded image or advertisement.

    The ``type`` column partitions rows: ``type < 10`` is an advertisement,
    ``type >= 10`` is a regular image.  The original query helpers repeated
    the same filter/order chain in four branches; they now share one ordering
    helper and compose the query incrementally (same results, same interface).
    """
    __tablename__ = 'images'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    filename = db.Column(db.String(120), nullable=True)
    ad_html = db.Column(db.Text)
    location = db.Column(db.String(120), nullable=False)
    type = db.Column(db.Integer, nullable=False)
    url = db.Column(db.String(120))
    active = db.Column(db.Boolean, default=False)
    timestamp = db.Column(db.DateTime,
                          nullable=False,
                          default=datetime.utcnow)
    def __init__(self, **kwargs):
        super(Image, self).__init__(**kwargs)
    @classmethod
    def _timestamp_order(cls, descending):
        # Shared ordering expression for the query helpers below.
        return cls.timestamp.desc() if descending else cls.timestamp
    @classmethod
    def get_all_imgs(cls, descending=True):
        """Return all regular images (type >= 10), ordered by timestamp."""
        return cls.query.filter(cls.type >= 10)\
               .order_by(cls._timestamp_order(descending)).all()
    @classmethod
    def get_all_ads(cls, descending=True, only_active=True):
        """Return ads (type < 10), optionally only active ones, by timestamp."""
        query = cls.query.filter(cls.type < 10)
        if only_active:
            # SQLAlchemy column expression; `== True` is intentional here.
            query = query.filter(cls.active == True)
        return query.order_by(cls._timestamp_order(descending)).all()
    @classmethod
    def get_by_id(cls, aid):
        """Return the image with primary key *aid*, or None if absent."""
        return cls.query.filter_by(id=aid).first()
#pylint: disable-msg=R0903
class About(db.Model):
    """Single-body 'about' page content with a creation timestamp."""
    __tablename__ = 'about'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    body = db.Column(db.Text)
    timestamp = db.Column(db.DateTime,
                          nullable=False,
                          default=datetime.utcnow)
    def __init__(self, **kwargs):
        super(About, self).__init__(**kwargs)
|
finnurtorfa/aflafrettir.is
|
app/models.py
|
Python
|
mit
| 8,023
|
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import time
from openerp import pooler
from openerp.osv import fields, osv
from openerp.tools.translate import _
class is_api(osv.osv):
    """Helper functions for managing OVE usagers, structures and their
    permission groups G1..G10.

    The original implementation repeated the same ten-way if-chains and
    parallel dict updates for every group; all of that is now driven by the
    single ``_GROUP_DEFS`` table below.  Method names and signatures are
    unchanged.  Also removes a leftover debug ``print`` from
    ``get_usagers_structure``.
    """
    _name = 'is_api'
    _description = u'Fonctions générales'

    # (code, display label) for each group, in canonical creation order.
    # The matching field on is.usager / key in the users dict for code 'Gn'
    # is 'group_n' (see _group_field()).  Labels are stored in the database,
    # so they must stay exactly as originally written.
    _GROUP_DEFS = [
        ('G1', 'Groupe Impression'),
        ('G2', 'Groupe Donnée Administrative'),
        ('G3', 'Groupe Donnée Administrative Modification'),
        ('G4', 'Groupe Donnée Institutionnelle'),
        ('G5', 'Groupe Donnée Institutionnelle Modification'),
        ('G6', 'Groupe Donnée Institutionnelle Validation'),
        ('G7', 'Groupe Donnée métier'),
        ('G8', 'Groupe Donnée métier Modification'),
        ('G9', 'Groupe Donnée métier Validation'),
        ('G10', 'Groupe Structure'),
    ]

    @staticmethod
    def _group_field(code):
        # Map a group code to its field/dict key: 'G3' -> 'group_3'.
        return 'group_' + code[1:]

    def get_usagers_structure(self, cr, uid, structure_id, context=None):
        """Return the ids of the usagers linked to *structure_id*,
        duplicates removed."""
        usager_line_obj = self.pool.get('ove.usager.structure')
        line_ids = usager_line_obj.search(cr, uid, [('structure_id', '=', structure_id)], context=context)
        lines = usager_line_obj.read(cr, uid, line_ids, ['usager_id'], context=context)
        # read() returns usager_id as an (id, name) pair; keep unique ids.
        return list(set(line['usager_id'][0] for line in lines))

    def get_usager_groups(self, cr, uid, usager_id, context=None):
        """Return [{'id': ..., 'code': ...}] for the usager's groups,
        sorted by code.

        NOTE(review): the sort is lexicographic, so 'G10' sorts before 'G2';
        no caller visible here depends on the ordering."""
        group_obj = self.pool.get('ove.groupe')
        group_ids = group_obj.search(cr, uid, [('usager_id', '=', usager_id)], context=context)
        groups = [{'id': rec['id'], 'code': rec['code']}
                  for rec in group_obj.read(cr, uid, group_ids, ['id', 'code'], context=context)]
        return sorted(groups, key=lambda g: g['code'])

    def get_users_usager(self, cr, uid, structure_lines, context=None):
        """Collect, per group G1..G9, the user ids granted through the
        structures of *structure_lines*; 'group_10' receives the union of
        all of them.  Every list is returned without duplicates."""
        users = dict(('group_%d' % i, []) for i in range(1, 11))
        if not structure_lines:
            return users
        for line in structure_lines:
            if not line.structure_id.users_line:
                continue
            for user_line in line.structure_id.users_line:
                for i in range(1, 10):
                    field = 'group_%d' % i
                    # A user gets group i when both the structure line and
                    # the user line enable that group.
                    if getattr(user_line, field) and getattr(line, field):
                        users[field].append(user_line.user_id.id)
                        users['group_10'].append(user_line.user_id.id)
        # Eliminate duplicates in every list.
        for field in users:
            users[field] = list(set(users[field]))
        return users

    def create_group(self, cr, uid, code_groupe, prefix, name_group, users, usager_id, context=None):
        """Create an ove.groupe record and return its id."""
        vals = {
            'code': code_groupe,
            'name': prefix + ' - ' + name_group,
            'user_ids': [[6, 0, users]],
            'usager_id': usager_id,
        }
        return self.pool.get('ove.groupe').create(cr, uid, vals, context=context)

    def associate_groupe_usager(self, cr, uid, usager_id, group_id, group_usager, context=None):
        """Store *group_id* on the is.usager field matching code
        *group_usager* ('G1'..'G10'); unknown codes are silently ignored,
        as in the original if-chain."""
        valid_codes = set(code for code, label in self._GROUP_DEFS)
        if group_usager in valid_codes:
            field = self._group_field(group_usager)
            self.pool.get('is.usager').write(cr, uid, usager_id, {field: group_id}, context=context)
        return True

    def create_ove_groups(self, cr, uid, prefix, users, usager_id, context=None):
        """Create the ten OVE groups for a usager (or structure) and link
        each one to the corresponding is.usager field."""
        for code, label in self._GROUP_DEFS:
            group_id = self.create_group(cr, uid, code, prefix, label,
                                         users[self._group_field(code)],
                                         usager_id, context=context)
            self.associate_groupe_usager(cr, uid, usager_id, group_id, code, context)
        return True

    def update_usager_groupes(self, cr, uid, usager_id, users, context=None):
        """Refresh the member list of every existing group of the usager
        from the *users* dict produced by get_users_usager()."""
        valid_codes = set(code for code, label in self._GROUP_DEFS)
        for group in self.get_usager_groups(cr, uid, usager_id, context=context):
            if group['code'] in valid_codes:
                self.update_ove_group(cr, uid, group['id'],
                                      users[self._group_field(group['code'])], context)
        return True

    def update_ove_group(self, cr, uid, group_id, users, context=None):
        """Replace the member list of one ove.groupe record."""
        vals = {
            'user_ids': [[6, 0, users]],
        }
        return self.pool.get('ove.groupe').write(cr, uid, group_id, vals, context=context)

    def get_missed_ove_group(self, cr, uid, usager_groups, context=None):
        """Return the codes (G1..G10, canonical order) absent from
        *usager_groups*, a list of dicts carrying a 'code' key."""
        present = set(group['code'] for group in usager_groups)
        return [code for code, label in self._GROUP_DEFS if code not in present]

    def create_missed_ove_group(self, cr, uid, group, usager_id, prefix, context=None):
        """Create the single missing group *group* ('G1'..'G10') for the
        usager with an empty member list; unknown codes are ignored.
        NOTE(review): like the original, this does not link the new group
        back to the is.usager field."""
        labels = dict(self._GROUP_DEFS)
        if group in labels:
            self.create_group(cr, uid, group, prefix, labels[group], [], usager_id, context=context)
        return True
|
tonygalmiche/ove_structure
|
is_api.py
|
Python
|
mit
| 13,118
|
#!/usr/bin/env python3
# Distributed under CC0 1.0 Universal (CC0 1.0) Public Domain Dedication.
# pylint: disable=ungrouped-imports,wrong-import-position
import os
import sys
import pathlib
import asyncio
import logging
import importlib
import pyuavcan
# Production applications are recommended to compile their DSDL namespaces as part of the build process. The enclosed
# file "setup.py" provides an example of how to do that. The output path we specify here shall match that of "setup.py".
# Here we compile DSDL just-in-time to demonstrate an alternative.
compiled_dsdl_dir = pathlib.Path(__file__).resolve().parent / ".demo_dsdl_compiled"
# Make the compilation outputs importable. Let your IDE index this directory as sources to enable code completion.
sys.path.insert(0, str(compiled_dsdl_dir))
try:
import sirius_cyber_corp # This is our vendor-specific root namespace. Custom data types.
import pyuavcan.application # This module requires the root namespace "uavcan" to be transcompiled.
except (ImportError, AttributeError): # Redistributable applications typically don't need this section.
logging.warning("Transcompiling DSDL, this may take a while")
src_dir = pathlib.Path(__file__).resolve().parent
pyuavcan.dsdl.compile_all(
[
src_dir / "custom_data_types/sirius_cyber_corp",
src_dir / "public_regulated_data_types/uavcan/",
],
output_directory=compiled_dsdl_dir,
)
importlib.invalidate_caches() # Python runtime requires this.
import sirius_cyber_corp
import pyuavcan.application
# Import other namespaces we're planning to use. Nested namespaces are not auto-imported, so in order to reach,
# say, "uavcan.node.Heartbeat", you have to "import uavcan.node".
import uavcan.node # noqa
import uavcan.si.sample.temperature # noqa
import uavcan.si.unit.temperature # noqa
import uavcan.si.unit.voltage # noqa
class DemoApp:
    """Demo UAVCAN node: a toy thermostat exposing pub/sub ports and two
    RPC-servers, configured entirely through the register file."""
    REGISTER_FILE = "demo_app.db"
    """
    The register file stores configuration parameters of the local application/node. The registers can be modified
    at launch via environment variables and at runtime via RPC-service "uavcan.register.Access".
    The file will be created automatically if it doesn't exist.
    """
    def __init__(self) -> None:
        """Create the node, its ports, and RPC-servers, then start the node."""
        node_info = uavcan.node.GetInfo_1.Response(
            software_version=uavcan.node.Version_1(major=1, minor=0),
            name="org.uavcan.pyuavcan.demo.demo_app",
        )
        # The Node class is basically the central part of the library -- it is the bridge between the application and
        # the UAVCAN network. Also, it implements certain standard application-layer functions, such as publishing
        # heartbeats and port introspection messages, responding to GetInfo, serving the register API, etc.
        # The register file stores the configuration parameters of our node (you can inspect it using SQLite Browser).
        self._node = pyuavcan.application.make_node(node_info, DemoApp.REGISTER_FILE)
        # Published heartbeat fields can be configured as follows.
        self._node.heartbeat_publisher.mode = uavcan.node.Mode_1.OPERATIONAL  # type: ignore
        self._node.heartbeat_publisher.vendor_specific_status_code = os.getpid() % 100
        # Now we can create ports to interact with the network.
        # They can also be created or destroyed later at any point after initialization.
        # A port is created by specifying its data type and its name (similar to topic names in ROS or DDS).
        # The subject-ID is obtained from the standard register named "uavcan.sub.temperature_setpoint.id".
        # It can also be modified via environment variable "UAVCAN__SUB__TEMPERATURE_SETPOINT__ID".
        self._sub_t_sp = self._node.make_subscriber(uavcan.si.unit.temperature.Scalar_1, "temperature_setpoint")
        # As you may probably guess by looking at the port names, we are building a basic thermostat here.
        # We subscribe to the temperature setpoint, temperature measurement (process variable), and publish voltage.
        # The corresponding registers are "uavcan.sub.temperature_measurement.id" and "uavcan.pub.heater_voltage.id".
        self._sub_t_pv = self._node.make_subscriber(uavcan.si.sample.temperature.Scalar_1, "temperature_measurement")
        self._pub_v_cmd = self._node.make_publisher(uavcan.si.unit.voltage.Scalar_1, "heater_voltage")
        # Create an RPC-server. The service-ID is read from standard register "uavcan.srv.least_squares.id".
        # This service is optional: if the service-ID is not specified, we simply don't provide it.
        try:
            srv_least_sq = self._node.get_server(sirius_cyber_corp.PerformLinearLeastSquaresFit_1, "least_squares")
            srv_least_sq.serve_in_background(self._serve_linear_least_squares)
        except pyuavcan.application.register.MissingRegisterError:
            logging.info("The least squares service is disabled by configuration")
        # Create another RPC-server using a standard service type for which a fixed service-ID is defined.
        # We don't specify the port name so the service-ID defaults to the fixed port-ID.
        # We could, of course, use it with a different service-ID as well, if needed.
        self._node.get_server(uavcan.node.ExecuteCommand_1).serve_in_background(self._serve_execute_command)
        self._node.start()  # Don't forget to start the node!
    @staticmethod
    async def _serve_linear_least_squares(
        request: sirius_cyber_corp.PerformLinearLeastSquaresFit_1.Request,
        metadata: pyuavcan.presentation.ServiceRequestMetadata,
    ) -> sirius_cyber_corp.PerformLinearLeastSquaresFit_1.Response:
        """Fit y = slope*x + y_intercept to the request points; both outputs
        are NaN when the input is degenerate (division by zero)."""
        logging.info("Least squares request %s from node %d", request, metadata.client_node_id)
        sum_x = sum(map(lambda p: p.x, request.points))  # type: ignore
        sum_y = sum(map(lambda p: p.y, request.points))  # type: ignore
        a = sum_x * sum_y - len(request.points) * sum(map(lambda p: p.x * p.y, request.points))  # type: ignore
        b = sum_x * sum_x - len(request.points) * sum(map(lambda p: p.x ** 2, request.points))  # type: ignore
        try:
            slope = a / b
            y_intercept = (sum_y - slope * sum_x) / len(request.points)
        except ZeroDivisionError:
            slope = float("nan")
            y_intercept = float("nan")
        return sirius_cyber_corp.PerformLinearLeastSquaresFit_1.Response(slope=slope, y_intercept=y_intercept)
    @staticmethod
    async def _serve_execute_command(
        request: uavcan.node.ExecuteCommand_1.Request,
        metadata: pyuavcan.presentation.ServiceRequestMetadata,
    ) -> uavcan.node.ExecuteCommand_1.Response:
        """Handle the standard ExecuteCommand service; only FACTORY_RESET
        (delete the register file) is supported."""
        logging.info("Execute command request %s from node %d", request, metadata.client_node_id)
        if request.command == uavcan.node.ExecuteCommand_1.Request.COMMAND_FACTORY_RESET:
            try:
                os.unlink(DemoApp.REGISTER_FILE)  # Reset to defaults by removing the register file.
            except OSError:  # Do nothing if already removed.
                pass
            return uavcan.node.ExecuteCommand_1.Response(uavcan.node.ExecuteCommand_1.Response.STATUS_SUCCESS)
        return uavcan.node.ExecuteCommand_1.Response(uavcan.node.ExecuteCommand_1.Response.STATUS_BAD_COMMAND)
    async def run(self) -> None:
        """
        The main method that runs the business logic. It is also possible to use the library in an IoC-style
        by using receive_in_background() for all subscriptions if desired.
        """
        temperature_setpoint = 0.0
        temperature_error = 0.0
        async def on_setpoint(msg: uavcan.si.unit.temperature.Scalar_1, _: pyuavcan.transport.TransferFrom) -> None:
            nonlocal temperature_setpoint
            temperature_setpoint = msg.kelvin
        self._sub_t_sp.receive_in_background(on_setpoint)  # IoC-style handler.
        # Expose internal states to external observers for diagnostic purposes. Here, we define read-only registers.
        # Since they are computed at every invocation, they are never stored in the register file.
        self._node.registry["thermostat.error"] = lambda: temperature_error
        self._node.registry["thermostat.setpoint"] = lambda: temperature_setpoint
        # Read application settings from the registry. The defaults will be used only if a new register file is created.
        gain_p, gain_i, gain_d = self._node.registry.setdefault("thermostat.pid.gains", [0.12, 0.18, 0.01]).floats
        logging.info("Application started with PID gains: %.3f %.3f %.3f", gain_p, gain_i, gain_d)
        print("Running. Press Ctrl+C to stop.", file=sys.stderr)
        # This loop will exit automatically when the node is close()d. It is also possible to use receive() instead.
        async for m, _metadata in self._sub_t_pv:
            assert isinstance(m, uavcan.si.sample.temperature.Scalar_1)
            temperature_error = temperature_setpoint - m.kelvin
            # NOTE(review): only the P gain is used; gain_i/gain_d are read but unused.
            voltage_output = temperature_error * gain_p  # Suppose this is a basic P-controller.
            await self._pub_v_cmd.publish(uavcan.si.unit.voltage.Scalar_1(voltage_output))
    def close(self) -> None:
        """
        This will close all the underlying resources down to the transport interface and all publishers/servers/etc.
        All pending tasks such as serve_in_background()/receive_in_background() will notice this and exit automatically.
        """
        self._node.close()
async def main() -> None:
    """Entry point: run the demo application until interrupted, then clean up."""
    logging.root.setLevel(logging.INFO)
    demo = DemoApp()
    try:
        try:
            await demo.run()
        except KeyboardInterrupt:
            pass  # Graceful shutdown on Ctrl+C.
    finally:
        demo.close()
# Script entry point: drive the async main() on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
|
UAVCAN/pyuavcan
|
demo/demo_app.py
|
Python
|
mit
| 9,775
|
import string
from pyparsing import (
Literal, White, Word, alphanums, CharsNotIn, Forward, Group, SkipTo,
Optional, OneOrMore, ZeroOrMore, pythonStyleComment)
class Parser(object):
    """pyparsing grammar for nginx-style configuration text.

    parse() returns pyparsing ParseResults; as_list() returns plain nested
    Python lists suitable for Dumper.
    """
    # Punctuation tokens, suppressed from the parse tree.
    left_bracket = Literal("{").suppress()
    right_bracket = Literal("}").suppress()
    semicolon = Literal(";").suppress()
    space = White().suppress()
    # Basic lexical elements.
    key = Word(alphanums + "_/")
    value = CharsNotIn("{};")
    value2 = CharsNotIn(";")
    location = CharsNotIn("{};," + string.whitespace)
    ifword = Literal("if")
    setword = Literal("set")
    # Location modifiers: exact, case-insensitive regex, regex, prefix.
    modifier = Literal("=") | Literal("~*") | Literal("~") | Literal("^~")
    # "key value;" directive.
    assignment = (key + Optional(space + value) + semicolon)
    # "set ... ;" directive.
    setblock = (setword + OneOrMore(space + value2) + semicolon)
    # Forward declarations enable the mutually recursive block structure.
    block = Forward()
    ifblock = Forward()
    subblock = Forward()
    ifblock << (
        ifword
        + SkipTo('{')
        + left_bracket
        + subblock
        + right_bracket)
    subblock << ZeroOrMore(
        Group(assignment) | block | ifblock | setblock
    )
    block << Group(
        Group(key + Optional(space + modifier) + Optional(space + location))
        + left_bracket
        + Group(subblock)
        + right_bracket
    )
    # A config is one or more directives/blocks; '#' comments are ignored.
    script = OneOrMore(Group(assignment) | block).ignore(pythonStyleComment)
    def __init__(self, source):
        # Raw configuration text to parse.
        self.source = source
    def parse(self):
        """Parse the source text and return pyparsing ParseResults."""
        return self.script.parseString(self.source)
    def as_list(self):
        """Parse the source text and return plain nested Python lists."""
        return self.parse().asList()
class Dumper(object):
    """Serializes the nested-list structure produced by Parser back to text."""
    def __init__(self, blocks, indentation=4):
        # Parsed config (nested lists) and number of spaces per indent level.
        self.blocks = blocks
        self.indentation = indentation
    def __iter__(self, blocks=None, current_indent=0, spacer=' '):
        """Yield output lines; calls itself recursively for nested blocks.

        NOTE(review): unconventional __iter__ taking arguments; a falsy
        `blocks` argument (e.g. []) falls back to self.blocks.
        """
        blocks = blocks or self.blocks
        for key, values in blocks:
            if current_indent:
                yield spacer
            indentation = spacer * current_indent
            # A list key means a block header ("key modifier location { ... }");
            # a plain string key means a simple "key value;" directive.
            if isinstance(key, list):
                yield indentation + spacer.join(key) + ' {'
                for parameter in values:
                    if isinstance(parameter[0], list):
                        # Nested block: recurse one indent level deeper.
                        dumped = self.__iter__(
                            [parameter],
                            current_indent + self.indentation)
                        for line in dumped:
                            yield line
                    else:
                        dumped = spacer.join(parameter) + ';'
                        yield spacer * (
                            current_indent + self.indentation) + dumped
                yield indentation + '}'
            else:
                yield spacer * current_indent + key + spacer + values + ';'
    def as_string(self):
        """Return the entire dump as one newline-joined string."""
        return '\n'.join(self)
    def to_file(self, out):
        """Write all lines to *out*, then close and return the file object.

        NOTE(review): closes the caller's file object — callers rely on this.
        """
        for line in self:
            out.write(line+"\n")
        out.close()
        return out
def loads(source):
    """Parse an nginx-style config string into a nested list structure."""
    parser = Parser(source)
    return parser.as_list()
def load(_file):
    """Parse an nginx-style config from a readable file object."""
    content = _file.read()
    return loads(content)
def dumps(blocks, indentation=4):
    """Serialize a nested-list config structure to a string."""
    dumper = Dumper(blocks, indentation)
    return dumper.as_string()
def dump(blocks, _file, indentation=4):
    """Write a serialized config to *_file*; returns the (closed) file object."""
    dumper = Dumper(blocks, indentation)
    return dumper.to_file(_file)
|
Nat-Lab/pac.py
|
lib/confParser.py
|
Python
|
mit
| 3,119
|
# -*- coding: utf-8 -*-
from PyQt4.QtCore import Qt
from PyQt4 import QtCore, QtGui
class GetCoverTask(QtCore.QObject):
    """Loads a cover image from disk and emits it scaled to fit 120x120."""
    coverLoaded = QtCore.pyqtSignal(QtGui.QImage)
    def __init__(self, coverPath):
        super(GetCoverTask, self).__init__()
        # Path of the image file to load when run() is invoked.
        self._coverPath = coverPath
    def run(self):
        cover = QtGui.QImage(self._coverPath)
        scaled = cover.scaled(
            120, 120, Qt.KeepAspectRatio, Qt.SmoothTransformation)
        self.coverLoaded.emit(scaled)
|
lightcode/SeriesWatcher
|
serieswatcher/serieswatcher/tasks/getcover.py
|
Python
|
mit
| 506
|
import os
import json
import tensorflow as tf
import numpy as np
def load_data(taxonomy):
    """Load every .npy file in directory *taxonomy* and stack them.

    Returns a float32 ndarray of shape (n_files, ...); all files are
    expected to contain arrays of identical shape.

    Fixes over the previous version:
    - filter with endswith('.npy') instead of name[-3:] == 'npy', which
      matched any filename merely ending in those three characters;
    - sort the directory listing so the row order is deterministic
      across filesystems.
    """
    data_files = sorted(f for f in os.listdir(taxonomy) if f.endswith('.npy'))
    result = []
    for data_file in data_files:
        result.append(np.load('%s/%s' % (taxonomy, data_file)))
        if len(result) % 1000 == 0:
            # Progress report for large directories.
            print("%s: %d/%d" % (taxonomy, len(result), len(data_files)))
    return np.array(result, dtype='f')
def train(steps=10000):
    """Train a 4-class DNN classifier over the four taxonomy datasets.

    Expects directories 'archaea', 'bacteria', 'protozoa' and 'fungi'
    (relative to the CWD) containing .npy feature vectors.  One third of
    the smallest taxonomy's size is held out per class for validation.
    Labels: archaea=0, bacteria=1, protozoa=2, fungi=3.
    Returns (classifier, predictions-on-validation-set).

    NOTE(review): uses tf.contrib / tf.contrib.learn (TensorFlow 1.x
    APIs, removed in TF 2) -- confirm the pinned TF version.
    """
    archaea_data= load_data('archaea')
    bacteria_data= load_data('bacteria')
    protozoa_data= load_data('protozoa')
    fungi_data= load_data('fungi')
    # Validation size: a third of the smallest class, taken from each class.
    val_len= int(min(len(archaea_data), len(bacteria_data), len(protozoa_data), len(fungi_data))/3)
    print('Using %d data for validating each taxonomy' %(val_len))
    # Training split: everything except the last val_len rows per class.
    archaea_train= archaea_data[0:val_len*-1]
    bacteria_train= bacteria_data[0:val_len*-1]
    protozoa_train= protozoa_data[0:val_len*-1]
    fungi_train= fungi_data[0:val_len*-1]
    # Validation split: the last val_len rows per class.
    archaea_val= archaea_data[int(len(archaea_data)-val_len):]
    bacteria_val= bacteria_data[int(len(bacteria_data)-val_len):]
    protozoa_val= protozoa_data[int(len(protozoa_data)-val_len):]
    fungi_val= fungi_data[int(len(fungi_data)-val_len):]
    x_train= np.concatenate((archaea_train, bacteria_train, protozoa_train, fungi_train), axis=0)
    y_train= []
    x_val= np.concatenate((archaea_val, bacteria_val, protozoa_val, fungi_val), axis=0)
    y_val= []
    # Build label vectors matching the concatenation order above.
    for i in range(len(archaea_train)):
        y_train.append([0])
    for i in range(len(bacteria_train)):
        y_train.append([1])
    for i in range(len(protozoa_train)):
        y_train.append([2])
    for i in range(len(fungi_train)):
        y_train.append([3])
    y_train= np.array(y_train)
    for i in range(len(archaea_val)):
        y_val.append([0])
    for i in range(len(bacteria_val)):
        y_val.append([1])
    for i in range(len(protozoa_val)):
        y_val.append([2])
    for i in range(len(fungi_val)):
        y_val.append([3])
    y_val= np.array(y_val)
    '''
    x= tf.placeholder(tf.float64, [None, x_data.shape[1]])
    W= tf.Variable(tf.zeros([x_data.shape[1], 2], dtype=tf.float64))
    b= tf.Variable(tf.zeros([2], dtype=tf.float64))
    y= tf.matmul(x, W)+b
    y_= tf.placeholder(tf.float64, [None, 2])
    cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y))
    train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
    sess= tf.InteractiveSession()
    tf.global_variables_initializer().run()
    for _ in range(10000):
        sess.run(train_step, feed_dict={x:x_data, y_:y_data})
    '''
    from tensorflow.contrib import learn
    import logging
    logging.getLogger().setLevel(logging.INFO)
    # Two hidden layers sized relative to the input dimensionality.
    feature_columns = [tf.contrib.layers.real_valued_column("", dimension=x_train.shape[1])]
    clf= learn.DNNClassifier(feature_columns=feature_columns, hidden_units=[int(x_train.shape[1]/10),int(x_train.shape[1]/50)], n_classes=4, model_dir='./dnn_model')
    clf.fit(x=x_train, y=y_train, steps=steps)
    predictions= np.array(list(clf.predict(np.asarray(x_val, dtype=np.float32))))
    predictions.shape= y_val.shape
    # Fraction of validation rows predicted correctly.
    precision= list(predictions==y_val).count(True)/len(y_val)
    print('Predict as %f precision' %(precision))
    return (clf, predictions)
|
elwlwlwk/GeneSeq2Taxonomy
|
train.py
|
Python
|
mit
| 3,062
|
import os
# Root of the stir package: the directory that contains this module.
STIR_ROOT = os.path.normpath(os.path.dirname(__file__))
# Standard sub-directories, resolved relative to the package root.
STIR_APP = os.path.normpath(os.path.join(STIR_ROOT, "application"))
STIR_LIB = os.path.normpath(os.path.join(STIR_ROOT, "library"))
# STIR_SCRIPT = os.path.normpath(os.path.join(STIR_ROOT, "script"))
|
setsulla/stir
|
stir/define.py
|
Python
|
mit
| 267
|
import logging
import socket
from unipath import Path
class BaseConfig(object):
    """Shared configuration defaults; host-specific subclasses override."""
    DEBUG = False
    TESTING = False
    LOG_LEVEL = logging.INFO
    # Directory containing this config module.
    SITE_ROOT = Path(__file__).absolute().parent
    STDNET_DB_URL = "redis://localhost:6379?db=0"
    # MaxMind GeoLite city databases (IPv4 and IPv6 variants).
    GEOIP_DB_URL_IPV4 = "http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz"
    GEOIP_DB_URL_IPV6 = "http://geolite.maxmind.com/download/geoip/database/GeoLiteCityv6-beta/GeoLiteCityv6.dat.gz"
    # Local paths the downloaded databases are stored at.
    GEOIP_PATH_V4 = SITE_ROOT.child("data", "GeoLiteCity.dat")
    GEOIP_PATH_V6 = SITE_ROOT.child("data", "GeoLiteCityv6.dat")
    UNOFFICIAL_MIRRORS = [
        "pypi.crate.io",
    ]
    # Mirror used when no geographically closer one is found.
    FALLBACK_MIRROR = "a.pypi.python.org"
    REDIS = {
        'HOST': "localhost",
        'PORT': 6379,
        'DB': 0,
    }
    # Redis key templates used by the application.
    KEY_LAST_UPDATE = "nearest_pypi:last_update"
    KEY_MIRROR = "nearest_pypi:mirror:{}:{}"
class Live(BaseConfig):
    """Production configuration."""
    LOG_LEVEL = logging.INFO
    # The Sentry DSN is read from a local file at import time (relative to
    # the CWD); a missing file simply leaves SENTRY_DSN undefined.
    try:
        with open("sentry.dsn", "r") as sentry:
            SENTRY_DSN = sentry.read()
    except IOError:
        pass
class Devel(BaseConfig):
    """Development configuration: debug/testing on, verbose logging."""
    DEBUG = True
    TESTING = True
    LOG_LEVEL = logging.DEBUG
# Map of fully-qualified hostnames to their config class; unknown hosts
# fall back to the Live (production) configuration.
config_map = {
    "oxygen.ulo.pe": Live,
    "argon": Devel,
    "argon.local": Devel,
}
current_hostname = socket.getfqdn()
Config = config_map.get(current_hostname, Live)
STATICA_HACK = True
# Sets STATICA_HACK to False via an obfuscated global assignment, so the
# "if" below is never executed at runtime but still fools static analyzers
# into seeing a Config = BaseConfig binding.
globals()['kcah_acitats'[::-1].upper()] = False
if STATICA_HACK:
    # This is never executed - it is here to make static analyzers happy.
    # Taken from https://github.com/celery/kombu/blob/master/kombu/__init__.py#L24-L35
    Config = BaseConfig
|
ulope/nearest_pypi
|
config.py
|
Python
|
mit
| 1,600
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import join, isfile
from os import walk
import io
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
def read_file(filename):
    """Return the stripped text content of *filename*.

    Reads with an explicit UTF-8 encoding (consistent with the README
    read below) instead of the platform default, which can differ
    between build environments.
    """
    with open(filename, encoding='utf-8') as fp:
        return fp.read().strip()
def read_requirements(filename):
    """Return the requirement lines of *filename*.

    Skips comment lines (including indented ones) and blank lines;
    previously, blank interior lines produced empty strings in the
    install_requires list.
    """
    return [line.strip() for line in read_file(filename).splitlines()
            if line.strip() and not line.lstrip().startswith('#')]
# --- package metadata ---
NAME = 'gerapy'
FOLDER = 'gerapy'
DESCRIPTION = 'Distributed Crawler Management Framework Based on Scrapy, Scrapyd, Scrapyd-Client, Scrapyd-API, Django and Vue.js'
URL = 'https://github.com/Gerapy/Gerapy'
EMAIL = 'cqc@cuiqingcai.com'
AUTHOR = 'Germey'
REQUIRES_PYTHON = '>=3.5.0'
# None means: read the version from gerapy/__version__.py below.
VERSION = None
REQUIRED = read_requirements('requirements.txt')
here = os.path.abspath(os.path.dirname(__file__))
# Use the README as the long description; fall back to the short one.
try:
    with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
        long_description = '\n' + f.read()
except FileNotFoundError:
    long_description = DESCRIPTION
# Load __version__ by executing the version module into a dict.
about = {}
if not VERSION:
    with open(os.path.join(here, FOLDER, '__version__.py')) as f:
        exec(f.read(), about)
else:
    about['__version__'] = VERSION
def package_files(directories):
    """Collect file paths under *directories*, each prefixed with '..'.

    setuptools resolves package_data paths relative to the package
    directory, hence the leading '..' component.  Entries that are plain
    files are included directly.
    """
    collected = []
    for entry in directories:
        if isfile(entry):
            collected.append(join('..', entry))
            continue
        # Renamed walk variables so the outer parameter is not shadowed.
        for root, _dirs, filenames in walk(entry):
            for filename in filenames:
                collected.append(join('..', root, filename))
    return collected
class UploadCommand(Command):
    """Custom `python setup.py upload` command: build, publish, tag."""
    description = 'Build and publish the package.'
    user_options = []
    @staticmethod
    def status(s):
        """Prints things in bold."""
        print('\033[1m{0}\033[0m'.format(s))
    def initialize_options(self):
        # Required by the Command interface; nothing to set up.
        pass
    def finalize_options(self):
        # Required by the Command interface; nothing to finalize.
        pass
    def run(self):
        # Remove stale build artifacts; ignore if 'dist' does not exist.
        try:
            self.status('Removing previous builds…')
            rmtree(os.path.join(here, 'dist'))
        except OSError:
            pass
        self.status('Building Source and Wheel (universal) distribution…')
        os.system(
            '{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
        self.status('Uploading the package to PyPI via Twine…')
        os.system('twine upload dist/*')
        # Tag the release with the version read from __version__.py.
        self.status('Pushing git tags…')
        os.system('git tag v{0}'.format(about['__version__']))
        os.system('git push --tags')
        sys.exit()
# Main package definition; metadata comes from the constants above.
setup(
    name=NAME,
    version=about['__version__'],
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type='text/markdown',
    author=AUTHOR,
    author_email=EMAIL,
    python_requires=REQUIRES_PYTHON,
    url=URL,
    packages=find_packages(exclude=('tests',)),
    install_requires=REQUIRED,
    include_package_data=True,
    license='MIT',
    # Console entry point: the `gerapy` command.
    entry_points={
        'console_scripts': ['gerapy = gerapy.cmd:cmd']
    },
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # Bundle the server's static assets and templates with the package.
    package_data={
        '': package_files([
            'gerapy/server/static',
            'gerapy/server/core/templates',
            'gerapy/templates',
        ])
    },
    # $ setup.py publish support.
    cmdclass={
        'upload': UploadCommand,
    },
)
|
Gerapy/Gerapy
|
setup.py
|
Python
|
mit
| 3,683
|
import functools
from pluss.app import app
from pluss.util.cache import Cache
# Cache key, parameterized by the client's remote IP address.
RATE_LIMIT_CACHE_KEY_TEMPLATE = 'pluss--remoteip--ratelimit--1--%s'
def ratelimited(func):
    """Includes the wrapped handler in the global rate limiter (60 calls/min)."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # NOTE(review): `flask` is never imported in this module -- this
        # line will raise NameError at request time unless flask is made
        # available some other way; confirm and add the import if needed.
        ratelimit_key = RATE_LIMIT_CACHE_KEY_TEMPLATE % flask.request.remote_addr
        # Increment the existing minute's counter, or start a new one if none exists
        # (relies on the short-circuiting of 'or')
        # NOTE(review): on a fresh key this binds the return value of
        # Cache.set (presumably truthy, not necessarily the int 1) --
        # verify the cache backend's return values.
        remote_ip_rate = Cache.incr(ratelimit_key) or Cache.set(ratelimit_key, 1, time=60)
        if remote_ip_rate > 60:
            # Log only at a few thresholds to avoid flooding the log.
            if remote_ip_rate in (61, 100, 1000, 10000):
                # NOTE(review): Flask apps expose `app.logger`, not
                # `app.logging` -- confirm this attribute exists.
                app.logging.info('Rate limited %s - %d requests/min.',
                    flask.request.remote_addr, remote_ip_rate)
            message = 'Rate limit exceeded. Please do not make more than 60 requests per minute.'
            return message, 503, {'Retry-After': 60} # Service Unavailable
        return func(*args, **kwargs)
    return wrapper
# vim: set ts=4 sts=4 sw=4 et:
|
ayust/pluss
|
pluss/util/ratelimit.py
|
Python
|
mit
| 1,111
|
# -*- coding: utf-8 *-*
from collections import OrderedDict
from nicepy.utils import ljust_all, pretty_repr
def get_failed_msg(compare_method, values, expected_values, names=None, expected_names=None):
    """Build a failure message for value pairs rejected by *compare_method*.

    :param compare_method: callable(expected, actual) -> bool
    :param values: actual values
    :param expected_values: expected values, zipped with *values*
    :param names: optional labels for the actual values (default: indices)
    :param expected_names: optional labels for the expected values
    :returns: a formatted message, or None when every pair compares equal

    Fix: the default for *names* was ``map(str, range(...))``, whose
    result has no len() on Python 3 and broke the *expected_names*
    default; a list comprehension works on both Python 2 and 3.
    """
    failed_list = []
    names = names or [str(i) for i in range(len(values))]
    expected_names = expected_names or [''] * len(names)
    for value, expected_value, name, expected_name in zip(values, expected_values,
                                                          names, expected_names):
        if not compare_method(expected_value, value):
            failed_list.append((pretty_repr(value), pretty_repr(expected_value),
                                name, expected_name))
    return _get_failed_msg(failed_list)
def _get_failed_msg(failed_list):
    """Format (value_repr, expected_repr, name, expected_name) tuples.

    Returns None for an empty list, otherwise a multi-line message with
    the repr columns padded so they line up vertically.
    """
    if not failed_list:
        return None
    # Pad each column to a common width, then rebuild the rows.
    padded_columns = map(ljust_all, zip(*failed_list))
    rows = zip(*padded_columns)
    msg = 'actual values != expected values:'
    for value_repr, expected_value_repr, name, expected_name in sorted(rows):
        msg += '\n\t%s' % name
        if expected_name:
            msg += ' != %s' % expected_name
        msg += ': %s != %s' % (value_repr, expected_value_repr)
    return msg
def get_multi_failed_msg(assert_method, *lists):
    """Apply *assert_method* across zipped *lists*, collecting all failures.

    :param assert_method: callable invoked with one element from each list;
        expected to raise AssertionError on failure
    :returns: a combined message listing every failing index, or None when
        all assertions passed

    Fix: ``e.message`` and ``dict.iteritems()`` are Python-2-only; use
    ``str(e)`` and ``items()``, which behave the same on Python 2 and
    also work on Python 3.
    """
    failed_msgs = OrderedDict()
    for index, args in enumerate(zip(*lists)):
        try:
            assert_method(*args)
        except AssertionError as e:
            failed_msgs[index] = str(e)
    msg = None
    if failed_msgs:
        msg = 'Multi-assert failed:'
        for index, error_msg in sorted(failed_msgs.items()):
            msg += '\nIndex %d: %s' % (index, error_msg)
    return msg
|
katakumpo/nicepy
|
nicepy/assertions/helpers.py
|
Python
|
mit
| 1,696
|
#! /usr/bin/env python
"""
Generate random MAC address
Inspired by http://www.linux-kvm.com/sites/default/files/macgen.py
"""
from sys import argv
import random
DEFAULT_OUI = '00-16-3e' # Xensource, Inc.


def random_mac(oui=None):
    """Return a random MAC address, with optional +oui+ override.

    The first random octet is capped at 0x7f; the OUI prefix defaults
    to the Xensource range.
    """
    tail = ["%02x" % random.randint(0x00, hi) for hi in (0x7f, 0xff, 0xff)]
    return '-'.join([oui or DEFAULT_OUI] + tail)
if __name__ == '__main__':
    # CLI: an optional first argument overrides the OUI prefix.
    # (Python 2 print statement -- this script is Python 2 only.)
    print random_mac(argv[1] if len(argv) > 1 else None)
|
tardate/LittleCodingKata
|
python/random_mac_generation/random_mac.py
|
Python
|
mit
| 558
|
from data import formats
class Player():
    """A player on a team, optionally with a picked champion."""

    def __init__(self, name, position, team, champion=''):
        # Store the descriptive attributes as given.
        self.name, self.position = name, position
        self.team, self.champion = team, champion

    def __str__(self):
        # Presentation is delegated to the shared template in data.formats.
        template = formats.player
        return template.format(name=self.name, position=self.position,
                               team=self.team, champion=self.champion)
|
mvy/reddit_live_lcs
|
models/player.py
|
Python
|
mit
| 396
|
# -*- coding: utf-8 -*-
""" Authentication, Authorization, Accouting
@requires: U{B{I{gluon}} <http://web2py.com>}
@copyright: (c) 2010-2012 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["AuthS3",
"S3Permission",
"S3Audit",
"S3RoleManager",
"FaceBookAccount",
"GooglePlusAccount",
]
import datetime
import re
import time
import urllib
from urllib import urlencode
import urllib2
from gluon import *
from gluon.storage import Storage, Messages
from gluon.dal import Field, Row, Query, Set, Table, Expression
from gluon.sqlhtml import CheckboxesWidget, StringWidget
from gluon.tools import Auth, callback, addrow
from gluon.utils import web2py_uuid
from gluon.validators import IS_SLUG
from gluon.contrib import simplejson as json
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon.contrib.login_methods.oauth20_account import OAuthAccount
from s3method import S3Method
from s3validators import IS_ACL
from s3widgets import S3ACLWidget, CheckboxesWidgetS3
from s3utils import s3_mark_required
from s3fields import s3_uid, s3_timestamp, s3_deletion_status
# Sentinel used by methods to detect "argument not supplied".
DEFAULT = lambda: None
# Matches "table.field" style references.
table_field = re.compile("[\w_]+\.[\w_]+")
DEBUG = False
# _debug writes to stderr when DEBUG is on, otherwise it is a no-op.
# (Python 2 print syntax -- this module is Python 2 only.)
if DEBUG:
    import sys
    print >> sys.stderr, "S3AAA: DEBUG MODE"
    def _debug(m):
        print >> sys.stderr, m
else:
    _debug = lambda m: None
# =============================================================================
class AuthS3(Auth):
"""
S3 extensions of the gluon.tools.Auth class
- override:
define_tables()
login()
register()
profile()
verify_email()
requires_membership()
- add:
s3_has_role()
s3_has_permission()
s3_logged_in()
s3_accessible_query()
s3_impersonate()
s3_register() callback
s3_link_to_person()
s3_verify_email_onaccept()
s3_group_members()
s3_user_to_person()
s3_person_to_user()
person_id()
- language
- utc_offset
- organisation
- @ToDo: Facility
"""
# Configuration of UIDs for system roles
S3_SYSTEM_ROLES = Storage(ADMIN = "ADMIN",
AUTHENTICATED = "AUTHENTICATED",
ANONYMOUS = "ANONYMOUS",
EDITOR = "EDITOR",
MAP_ADMIN = "MAP_ADMIN")
    def __init__(self):
        """ Initialise parent class & make any necessary modifications """
        Auth.__init__(self, current.db)
        deployment_settings = current.deployment_settings
        system_name = deployment_settings.get_system_name()
        # lock_keys prevents accidental creation of new settings keys;
        # it is temporarily unlocked to customize defaults.
        self.settings.lock_keys = False
        self.settings.username_field = False
        self.settings.lock_keys = True
        # Customize the user-facing auth messages for this deployment.
        self.messages.lock_keys = False
        self.messages.registration_pending_approval = "Account registered, however registration is still pending approval - please wait until confirmation received."
        self.messages.email_approver_failed = "Failed to send mail to Approver - see if you can notify them manually!"
        self.messages.email_verification_failed = "Unable to send verification email - either your email is invalid or our email server is down"
        self.messages.email_sent = "Verification Email sent - please check your email to validate. If you do not receive this email please check you junk email or spam filters"
        self.messages.email_verified = "Email verified - you can now login"
        self.messages.welcome_email_subject = "Welcome to %(system_name)s" % \
            dict(system_name=system_name)
        self.messages.welcome_email = \
            "Welcome to %(system_name)s - click on the link %(url)s to complete your profile" % \
            dict(system_name = system_name,
                 url = deployment_settings.get_base_public_url() + URL("default", "user", args=["profile"]))
        self.messages.duplicate_email = "This email address is already in use"
        self.messages.registration_disabled = "Registration Disabled!"
        self.messages.registration_verifying = "You haven't yet Verified your account - please check your email"
        self.messages.label_organisation_id = "Organization"
        self.messages.label_site_id = "Facility"
        self.messages.label_utc_offset = "UTC Offset"
        self.messages.label_image = "Profile Image"
        self.messages.help_utc_offset = "The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones."
        self.messages.help_mobile_phone = "Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages."
        self.messages.help_organisation = "Entering an Organization is optional, but doing so directs you to the appropriate approver & means you automatically get the appropriate permissions."
        self.messages.help_image = "You can either use %(gravatar)s or else upload a picture here. The picture will be resized to 50x50."
        #self.messages.logged_in = "Signed In"
        #self.messages.submit_button = "Signed In"
        #self.messages.logged_out = "Signed Out"
        self.messages.lock_keys = True
        # S3Permission
        self.permission = S3Permission(self)
        # Set to True to override any authorization
        self.override = False
        # Site types (for OrgAuth)
        T = current.T
        if deployment_settings.get_ui_camp():
            shelter = T("Camp")
        else:
            shelter = T("Shelter")
        # Tables that count as "sites" for organisation-based auth.
        self.org_site_types = Storage(
            cr_shelter = shelter,
            #org_facility = T("Facility"),
            org_facility = T("Site"),
            org_office = T("Office"),
            hms_hospital = T("Hospital"),
            #project_site = T("Project Site"),
            #fire_station = T("Fire Station"),
        )
# -------------------------------------------------------------------------
    def define_tables(self, migrate=True, fake_migrate=False):
        """
            to be called unless tables are defined manually
            usages::
                # defines all needed tables and table files
                # UUID + "_auth_user.table", ...
                auth.define_tables()
                # defines all needed tables and table files
                # "myprefix_auth_user.table", ...
                auth.define_tables(migrate="myprefix_")
                # defines all needed tables without migration/table files
                auth.define_tables(migrate=False)
        """
        db = current.db
        request = current.request
        session = current.session
        settings = self.settings
        messages = self.messages
        # User table
        # Two variants: keyed by username, or (Sahana default) by email.
        if not settings.table_user:
            passfield = settings.password_field
            if settings.username_field:
                # with username (not used by default in Sahana)
                settings.table_user = db.define_table(
                    settings.table_user_name,
                    Field("first_name", length=128, default="",
                          label=messages.label_first_name),
                    Field("last_name", length=128, default="",
                          label=messages.label_last_name),
                    Field("username", length=128, default="",
                          unique=True),
                    Field(passfield, "password", length=512,
                          readable=False, label=messages.label_password),
                    Field("email", length=512, default="",
                          label=messages.label_email),
                    Field("language", length=16),
                    Field("utc_offset", length=16,
                          readable=False, writable=False),
                    Field("organisation_id", "integer",
                          writable=False,
                          label=messages.label_organisation_id),
                    Field("site_id", "integer",
                          writable=False,
                          label=messages.label_site_id),
                    Field("registration_key", length=512,
                          writable=False, readable=False, default="",
                          label=messages.label_registration_key),
                    Field("reset_password_key", length=512,
                          writable=False, readable=False, default="",
                          label=messages.label_registration_key),
                    Field("deleted", "boolean", writable=False,
                          readable=False, default=False),
                    Field("timestmp", "datetime", writable=False,
                          readable=False, default=""),
                    migrate = migrate,
                    fake_migrate=fake_migrate,
                    *(s3_uid()+s3_timestamp()))
            else:
                # with email-address (Sahana default)
                settings.table_user = db.define_table(
                    settings.table_user_name,
                    Field("first_name", length=128, default="",
                          label=messages.label_first_name),
                    Field("last_name", length=128, default="",
                          label=messages.label_last_name),
                    Field("email", length=512, default="",
                          label=messages.label_email,
                          unique=True),
                    Field(passfield, "password", length=512,
                          readable=False, label=messages.label_password),
                    Field("language", length=16),
                    Field("utc_offset", length=16,
                          readable=False,
                          writable=False,
                          label=messages.label_utc_offset),
                    Field("organisation_id", "integer",
                          writable=False,
                          label=messages.label_organisation_id),
                    Field("site_id", "integer",
                          writable=False,
                          label=messages.label_site_id),
                    Field("registration_key", length=512,
                          writable=False, readable=False, default="",
                          label=messages.label_registration_key),
                    Field("reset_password_key", length=512,
                          writable=False, readable=False, default="",
                          label=messages.label_registration_key),
                    Field("deleted", "boolean", writable=False,
                          readable=False, default=False),
                    Field("timestmp", "datetime", writable=False,
                          readable=False, default=""),
                    migrate = migrate,
                    fake_migrate=fake_migrate,
                    *(s3_uid()+s3_timestamp()))
            # Validators for the user table fields.
            table = settings.table_user
            table.first_name.notnull = True
            table.first_name.requires = \
                IS_NOT_EMPTY(error_message=messages.is_empty)
            if current.deployment_settings.get_L10n_mandatory_lastname():
                table.last_name.notnull = True
                table.last_name.requires = \
                    IS_NOT_EMPTY(error_message=messages.is_empty)
            table.utc_offset.comment = A(SPAN("[Help]"),
                                         _class="tooltip",
                                         _title="%s|%s" % (messages.label_utc_offset,
                                                           messages.help_utc_offset))
            try:
                from s3validators import IS_UTC_OFFSET
                table.utc_offset.requires = IS_EMPTY_OR(IS_UTC_OFFSET())
            except:
                pass
            # Passwords are stored as salted SHA-512 hashes.
            table[passfield].requires = [CRYPT(key=settings.hmac_key,
                                               min_length=self.settings.password_min_length,
                                               digest_alg="sha512")]
            if settings.username_field:
                table.username.requires = IS_NOT_IN_DB(db,
                    "%s.username" % settings.table_user._tablename)
            table.email.requires = \
                [IS_EMAIL(error_message=messages.invalid_email),
                 IS_LOWER(),
                 IS_NOT_IN_DB(db,
                     "%s.email" % settings.table_user._tablename,
                     error_message=messages.duplicate_email)]
            table.registration_key.default = ""
        # Group table (roles)
        if not settings.table_group:
            settings.table_group = db.define_table(
                settings.table_group_name,
                # Group unique ID, must be notnull+unique:
                Field("uuid",
                      length=64,
                      notnull=True,
                      unique=True,
                      readable=False,
                      writable=False),
                # Group does not appear in the Role Manager:
                # (can neither assign, nor modify, nor delete)
                Field("hidden", "boolean",
                      readable=False,
                      writable=False,
                      default=False),
                # Group cannot be modified in the Role Manager:
                # (can assign, but neither modify nor delete)
                Field("system", "boolean",
                      readable=False,
                      writable=False,
                      default=False),
                # Group cannot be deleted in the Role Manager:
                # (can assign and modify, but not delete)
                Field("protected", "boolean",
                      readable=False,
                      writable=False,
                      default=False),
                # Role name:
                Field("role",
                      length=512,
                      default="",
                      unique=True,
                      label=messages.label_role),
                Field("description", "text",
                      label=messages.label_description),
                migrate = migrate,
                fake_migrate=fake_migrate,
                *(s3_timestamp()+s3_deletion_status()))
            table = settings.table_group
            table.role.requires = IS_NOT_IN_DB(db, "%s.role"
                                               % settings.table_group._tablename)
        # Group membership table (user<->role)
        if not settings.table_membership:
            settings.table_membership = db.define_table(
                settings.table_membership_name,
                Field("user_id", settings.table_user,
                      label=messages.label_user_id),
                Field("group_id", settings.table_group,
                      label=messages.label_group_id),
                migrate = migrate,
                fake_migrate=fake_migrate,
                *(s3_uid()+s3_timestamp()+s3_deletion_status()))
            table = settings.table_membership
            table.user_id.requires = IS_IN_DB(db, "%s.id" %
                    settings.table_user._tablename,
                    "%(id)s: %(first_name)s %(last_name)s")
            table.group_id.requires = IS_IN_DB(db, "%s.id" %
                    settings.table_group._tablename,
                    "%(id)s: %(role)s")
        security_policy = current.deployment_settings.get_security_policy()
        # Define Eden permission table
        self.permission.define_table(migrate=migrate,
                                     fake_migrate=fake_migrate)
        if security_policy not in (1, 2, 3, 4, 5, 6) and \
           not settings.table_permission:
            # Permissions table (group<->permission)
            # NB This Web2Py table is deprecated / replaced in Eden by S3Permission
            settings.table_permission = db.define_table(
                settings.table_permission_name,
                Field("group_id", settings.table_group,
                      label=messages.label_group_id),
                Field("name", default="default", length=512,
                      label=messages.label_name),
                Field("table_name", length=512,
                      label=messages.label_table_name),
                Field("record_id", "integer",
                      label=messages.label_record_id),
                migrate = migrate,
                fake_migrate=fake_migrate)
            table = settings.table_permission
            table.group_id.requires = IS_IN_DB(db, "%s.id" %
                    settings.table_group._tablename,
                    "%(id)s: %(role)s")
            table.name.requires = IS_NOT_EMPTY()
            table.table_name.requires = IS_IN_SET(db.tables)
            table.record_id.requires = IS_INT_IN_RANGE(0, 10 ** 9)
        # Event table (auth log)
        # Records Logins & ?
        # @ToDo: Deprecate? At least make it configurable?
        if not settings.table_event:
            settings.table_event = db.define_table(
                settings.table_event_name,
                Field("time_stamp", "datetime",
                      default=request.now,
                      label=messages.label_time_stamp),
                Field("client_ip",
                      default=request.client,
                      label=messages.label_client_ip),
                Field("user_id", settings.table_user, default=None,
                      requires = IS_IN_DB(db, "%s.id" %
                          settings.table_user._tablename,
                          "%(id)s: %(first_name)s %(last_name)s"),
                      label=messages.label_user_id),
                Field("origin", default="auth", length=512,
                      label=messages.label_origin,
                      requires = IS_NOT_EMPTY()),
                Field("description", "text", default="",
                      label=messages.label_description,
                      requires = IS_NOT_EMPTY()),
                migrate = migrate,
                fake_migrate=fake_migrate,
                *(s3_uid()+s3_timestamp()+s3_deletion_status()))
# -------------------------------------------------------------------------
def login_bare(self, username, password):
"""
Logs user in
- extended to understand session.s3.roles
"""
request = current.request
session = current.session
db = current.db
table_user = self.settings.table_user
table_membership = self.settings.table_membership
if self.settings.login_userfield:
userfield = self.settings.login_userfield
elif "username" in table_user.fields:
userfield = "username"
else:
userfield = "email"
passfield = self.settings.password_field
user = db(table_user[userfield] == username).select().first()
password = table_user[passfield].validate(password)[0]
if user:
user_id = user.id
if not user.registration_key and user[passfield] == password:
user = Storage(table_user._filter_fields(user, id=True))
session.auth = Storage(user=user,
last_visit=request.now,
expiration=self.settings.expiration)
self.user = user
self.set_roles()
return user
return False
# -------------------------------------------------------------------------
def set_roles(self):
"""
Update session roles and pe_id for the current user
"""
if self.user:
db = current.db
session = current.session
table_user = self.settings.table_user
table_membership = self.settings.table_membership
user_id = self.user.id
# Add the Roles to session.s3
roles = []
query = (table_membership.deleted != True) & \
(table_membership.user_id == user_id)
rows = db(query).select(table_membership.group_id)
session.s3.roles = [s.group_id for s in rows]
# Set pe_id for current user
ltable = current.s3db.pr_person_user
if ltable is not None:
query = (ltable.user_id == user_id)
row = db(query).select(ltable.pe_id, limitby=(0, 1)).first()
if row:
session.auth.user["pe_id"] = row.pe_id
return
# -------------------------------------------------------------------------
def set_cookie(self):
"""
Set a Cookie to the client browser so that we know this user has
registered & so we should present them with a login form instead
of a register form
"""
response = current.response
response.cookies["registered"] = "yes"
response.cookies["registered"]["expires"] = 365 * 24 * 3600 # 1 year
response.cookies["registered"]["path"] = "/"
# -------------------------------------------------------------------------
def login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT):
"""
Overrides Web2Py's login() to use custom flash styles & utcnow
@returns: a login form
"""
db = current.db
table_user = self.settings.table_user
if self.settings.login_userfield:
username = self.settings.login_userfield
elif "username" in table_user.fields:
username = "username"
else:
username = "email"
old_requires = table_user[username].requires
table_user[username].requires = [IS_NOT_EMPTY(), IS_LOWER()]
request = current.request
response = current.response
session = current.session
passfield = self.settings.password_field
try:
table_user[passfield].requires[-1].min_length = 0
except:
pass
if next is DEFAULT:
next = request.vars._next or self.settings.login_next
if onvalidation is DEFAULT:
onvalidation = self.settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.login_onaccept
if log is DEFAULT:
log = self.messages.login_log
user = None # default
# Do we use our own login form, or from a central source?
if self.settings.login_form == self:
form = SQLFORM(
table_user,
fields=[username, passfield],
hidden=dict(_next=request.vars._next),
showid=self.settings.showid,
submit_button=self.messages.submit_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if self.settings.remember_me_form:
# Add a new input checkbox "remember me for longer"
addrow(form,XML(" "),
DIV(XML(" "),
INPUT(_type='checkbox',
_class='checkbox',
_id="auth_user_remember",
_name="remember",
),
XML(" "),
LABEL(
self.messages.label_remember_me,
_for="auth_user_remember",
)),"",
self.settings.formstyle,
'auth_user_remember__row')
captcha = self.settings.login_captcha or \
(self.settings.login_captcha!=False and self.settings.captcha)
if captcha:
addrow(form, captcha.label, captcha, captcha.comment,
self.settings.formstyle,'captcha__row')
accepted_form = False
if form.accepts(request.vars, session,
formname="login", dbio=False,
onvalidation=onvalidation):
accepted_form = True
if username == "email":
# Check for Domains which can use Google's SMTP server for passwords
# @ToDo: an equivalent email_domains for other email providers
gmail_domains = current.deployment_settings.get_auth_gmail_domains()
if gmail_domains:
from gluon.contrib.login_methods.email_auth import email_auth
domain = form.vars[username].split("@")[1]
if domain in gmail_domains:
self.settings.login_methods.append(
email_auth("smtp.gmail.com:587", "@%s" % domain))
# Check for username in db
query = (table_user[username] == form.vars[username])
user = db(query).select().first()
if user:
# user in db, check if registration pending or disabled
temp_user = user
if temp_user.registration_key == "pending":
response.warning = self.messages.registration_pending
return form
elif temp_user.registration_key in ("disabled", "blocked"):
response.error = self.messages.login_disabled
return form
elif not temp_user.registration_key is None and \
temp_user.registration_key.strip():
response.warning = \
self.messages.registration_verifying
return form
# Try alternate logins 1st as these have the
# current version of the password
user = None
for login_method in self.settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in self.settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
# Alternates have failed, maybe because service inaccessible
if self.settings.login_methods[0] == self:
# Try logging in locally using cached credentials
if temp_user[passfield] == form.vars.get(passfield, ""):
# Success
user = temp_user
else:
# User not in db
if not self.settings.alternate_requires_registration:
# We're allowed to auto-register users from external systems
for login_method in self.settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in self.settings.login_methods:
# Do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
self.log_event(self.settings.login_failed_log,
request.post_vars)
# Invalid login
session.error = self.messages.invalid_login
redirect(self.url(args=request.args,
vars=request.get_vars))
else:
# Use a central authentication server
cas = self.settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(table_user._filter_fields(cas_user))
form = Storage()
form.vars = user
self.s3_register(form)
elif hasattr(cas, "login_form"):
return cas.login_form()
else:
# we need to pass through login again before going on
next = "%s?_next=%s" % (URL(r=request), next)
redirect(cas.login_url(next))
# Process authenticated users
if user:
user = Storage(table_user._filter_fields(user, id=True))
# If the user hasn't set a personal UTC offset,
# then read the UTC offset from the form:
if not user.utc_offset:
user.utc_offset = session.s3.utc_offset
session.auth = Storage(
user=user,
last_visit=request.now,
expiration = request.vars.get("remember", False) and \
self.settings.long_expiration or self.settings.expiration,
remember = request.vars.has_key("remember"),
hmac_key = web2py_uuid()
)
self.user = user
self.set_roles()
# Read their language from the Profile
language = user.language
current.T.force(language)
session.s3.language = language
session.confirmation = self.messages.logged_in
# Set a Cookie to present user with login box by default
self.set_cookie()
# Update the timestamp of the User so we know when they last logged-in
db(table_user.id == self.user.id).update(timestmp = request.utcnow)
if log and self.user:
self.log_event(log % self.user)
# How to continue
if self.settings.login_form == self:
if accepted_form:
if onaccept:
onaccept(form)
if isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
if next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
table_user[username].requires = old_requires
return form
else:
redirect(next)
# -------------------------------------------------------------------------
def register(self,
             next=DEFAULT,
             onvalidation=DEFAULT,
             onaccept=DEFAULT,
             log=DEFAULT):
    """
    Overrides Web2Py's register() to add new functionality:
        - Checks whether registration is permitted
        - Custom Flash styles
        - Allow form to be embedded in other pages
        - Optional addition of Mobile Phone field to the Register form
        - Optional addition of Organisation field to the Register form
        - Lookup Domains/Organisations to check for Whitelists
          &/or custom Approver

    @param next: URL to redirect to after successful registration
    @param onvalidation: form validation callback
    @param onaccept: callback invoked after the form has been accepted
    @param log: event-log message template

    @returns: a registration form
    """
    db = current.db
    settings = self.settings
    messages = self.messages
    request = current.request
    response = current.response
    session = current.session
    deployment_settings = current.deployment_settings

    # S3: Don't allow registration if disabled
    self_registration = deployment_settings.get_security_self_registration()
    if not self_registration:
        session.error = messages.registration_disabled
        redirect(URL(args=["login"]))

    # Already logged-in users don't need to register again
    if self.is_logged_in() and request.function != "index":
        redirect(settings.logged_url)

    if next == DEFAULT:
        next = request.vars._next or settings.register_next
    if onvalidation == DEFAULT:
        onvalidation = settings.register_onvalidation
    if onaccept == DEFAULT:
        onaccept = settings.register_onaccept
    if log == DEFAULT:
        log = messages.register_log

    user = settings.table_user
    passfield = settings.password_field

    # S3: Organisation field in form?
    if deployment_settings.get_auth_registration_requests_organisation():
        # Widget set in controllers/default.py
        #user.organisation_id.widget =
        user.organisation_id.writable = True
        if deployment_settings.get_auth_registration_organisation_mandatory():
            user.organisation_id.comment = SPAN("*", _class="req")
        else:
            user.organisation_id.comment = DIV(_class="tooltip",
                                               _title="%s|%s" % (messages.label_organisation_id,
                                                                 messages.help_organisation))
    else:
        user.organisation_id.readable = False
        user.organisation_id.writable = False
    user.organisation_id.default = deployment_settings.get_auth_registration_organisation_id_default()
    # @ToDo: Option to request Facility during Registration
    user.site_id.readable = False

    labels, required = s3_mark_required(user)

    #formstyle = current.manager.s3.crud.formstyle
    form = SQLFORM(user, hidden=dict(_next=request.vars._next),
                   labels = labels,
                   separator = "",
                   showid=settings.showid,
                   submit_button=messages.submit_button,
                   delete_label=messages.delete_label,
                   #formstyle = formstyle
                   )
    # Insert a password-confirmation field directly after the password row
    for i, row in enumerate(form[0].components):
        item = row[1][0]
        if isinstance(item, INPUT) and item["_name"] == passfield:
            field_id = "%s_password_two" % user._tablename
            #row = formstyle(...)
            form[0].insert(i + 1, TR(
                    TD(LABEL("%s:" % messages.verify_password,
                             _for="password_two",
                             _id=field_id + SQLFORM.ID_LABEL_SUFFIX),
                       _class="w2p_fl"),
                    INPUT(_name="password_two",
                          _id=field_id,
                          _type="password",
                          requires=IS_EXPR("value==%s" % \
                              repr(request.vars.get(passfield, None)),
                          error_message=messages.mismatched_password)),
                    SPAN("*", _class="req"),
                    "", _id=field_id + SQLFORM.ID_ROW_SUFFIX))
            #form[0].insert(i + 1, row)

    # add an opt in clause to receive emails depending on the deployment settings
    if deployment_settings.get_auth_opt_in_to_email():
        field_id = "%s_opt_in" % user._tablename
        comment = DIV(DIV(_class="tooltip",
                          _title="%s|%s" % ("Mailing list",
                                            "By selecting this you agree that we may contact you.")))
        checked = deployment_settings.get_auth_opt_in_default() and "selected"
        form[0].insert(-1,
                       TR(TD(LABEL("%s:" % "Receive updates",
                                   _for="opt_in",
                                   _id=field_id + SQLFORM.ID_LABEL_SUFFIX),
                             _class="w2p_fl"),
                          INPUT(_name="opt_in", _id=field_id, _type="checkbox", _checked=checked),
                          TD(comment,
                             _class="w2p_fc"),
                          _id=field_id + SQLFORM.ID_ROW_SUFFIX))

    # S3: Insert Mobile phone field into form
    if deployment_settings.get_auth_registration_requests_mobile_phone():
        field_id = "%s_mobile" % user._tablename
        if deployment_settings.get_auth_registration_mobile_phone_mandatory():
            comment = SPAN("*", _class="req")
        else:
            comment = DIV(_class="tooltip",
                          _title="%s|%s" % (deployment_settings.get_ui_label_mobile_phone(),
                                            messages.help_mobile_phone))
        form[0].insert(-1,
                       TR(TD(LABEL("%s:" % deployment_settings.get_ui_label_mobile_phone(),
                                   _for="mobile",
                                   _id=field_id + SQLFORM.ID_LABEL_SUFFIX),
                             _class="w2p_fl"),
                          INPUT(_name="mobile", _id=field_id),
                          TD(comment,
                             _class="w2p_fc"),
                          _id=field_id + SQLFORM.ID_ROW_SUFFIX))

    # S3: Insert Photo widget into form
    if deployment_settings.get_auth_registration_requests_image():
        label = self.messages.label_image
        comment = DIV(_class="stickytip",
                      _title="%s|%s" % (label,
                                        self.messages.help_image % \
                                            dict(gravatar = A("Gravatar",
                                                              _target="top",
                                                              _href="http://gravatar.com"))))
        field_id = "%s_image" % user._tablename
        widget = SQLFORM.widgets["upload"].widget(current.s3db.pr_image.image, None)
        form[0].insert(-1,
                       TR(TD(LABEL("%s:" % label,
                                   _for="image",
                                   _id=field_id + SQLFORM.ID_LABEL_SUFFIX),
                             _class="w2p_fl"),
                          widget,
                          TD(comment,
                             _class="w2p_fc"),
                          _id=field_id + SQLFORM.ID_ROW_SUFFIX))

    if settings.captcha != None:
        form[0].insert(-1, TR("", settings.captcha, ""))

    # Random registration key, used as the email-verification token
    import uuid
    user.registration_key.default = key = str(uuid.uuid4())

    if form.accepts(request.vars, session, formname="register",
                    onvalidation=onvalidation):

        if settings.create_user_groups:
            # Not used in S3
            description = \
                "group uniquely assigned to %(first_name)s %(last_name)s"\
                 % form.vars
            group_id = self.add_group("user_%s" % form.vars.id,
                                      description)
            self.add_membership(group_id, form.vars.id)

        approved = False
        users = db(settings.table_user.id > 0).count()
        if users == 1:
            # 1st user to register shouldn't need verification/approval
            approved = True

        elif settings.registration_requires_verification:
            # Ensure that we add to the correct Organization
            approver, organisation_id = self.s3_approver(form.vars)

            if organisation_id:
                # @ToDo: Is it correct to override the organisation entered by the user?
                #  Ideally (if the deployment_settings.auth.registration_requests_organisation = True
                #  the org could be selected based on the email and the user could then override
                form.vars.organisation = organisation_id

            # Send the Verification email
            # NOTE: failing to send the email aborts the registration (rollback)
            if not settings.mailer or \
               not settings.mailer.send(to=form.vars.email,
                                        subject=messages.verify_email_subject,
                                        message=messages.verify_email % dict(key=key)):
                db.rollback()
                response.error = messages.email_verification_failed
                return form
            # @ToDo: Deployment Setting?
            #session.confirmation = messages.email_sent
            next = URL(c="default", f="message",
                       args = ["verify_email_sent"],
                       vars = {"email": form.vars.email})

        elif settings.registration_requires_approval:
            # Identify the Approver &
            # ensure that we add to the correct Organization
            approver, organisation_id = self.s3_approver(form.vars)
            if organisation_id:
                form.vars.organisation_id = organisation_id

            if approver:
                # Send the Authorisation email
                form.vars.approver = approver
                if not settings.mailer or \
                    not settings.verify_email_onaccept(form.vars):
                    # We don't wish to prevent registration if the approver mail fails to send
                    #db.rollback()
                    session.error = messages.email_approver_failed
                    #return form
                user[form.vars.id] = dict(registration_key="pending")
                session.warning = messages.registration_pending_approval
            else:
                # The domain is Whitelisted
                approved = True
        else:
            # No verification or approval needed
            approved = True
            approver, organisation_id = self.s3_approver(form.vars)
            if organisation_id:
                form.vars.organisation = organisation_id
            form.vars.registration_key = ""
            form.vars.approver = approver
            settings.verify_email_onaccept(form.vars)

        # Set a Cookie to present user with login box by default
        self.set_cookie()

        if approved:
            # Clear the registration key and log the user straight in
            user[form.vars.id] = dict(registration_key="")
            session.confirmation = messages.registration_successful

            table_user = settings.table_user
            if "username" in table_user.fields:
                username = "username"
            else:
                username = "email"
            query = (table_user[username] == form.vars[username])
            user = db(query).select(limitby=(0, 1)).first()
            user = Storage(table_user._filter_fields(user, id=True))

            if users == 1:
                # Add the first user to admin group
                admin_group_id = 1
                self.add_membership(admin_group_id, user.id)

            # If the user hasn't set a personal UTC offset,
            # then read the UTC offset from the form:
            if not user.utc_offset:
                user.utc_offset = session.s3.utc_offset

            session.auth = Storage(user=user, last_visit=request.now,
                                   expiration=settings.expiration)
            self.user = user
            session.flash = messages.logged_in

        if log:
            self.log_event(log % form.vars)
        if onaccept:
            onaccept(form)
        if not next:
            next = self.url(args = request.args)
        elif isinstance(next, (list, tuple)):
            # fix issue with 2.6
            next = next[0]
        elif next and not next[0] == "/" and next[:4] != "http":
            next = self.url(next.replace("[id]", str(form.vars.id)))
        redirect(next)

    return form
# -------------------------------------------------------------------------
def profile(
    self,
    next=DEFAULT,
    onvalidation=DEFAULT,
    onaccept=DEFAULT,
    log=DEFAULT,
    ):
    """
    Returns a form that lets the user change his/her profile

    .. method:: Auth.profile([next=DEFAULT [, onvalidation=DEFAULT
        [, onaccept=DEFAULT [, log=DEFAULT]]]])

    Patched for S3 to use s3_mark_required

    @param next: URL to redirect to after a successful update
    @param onvalidation: form validation callback
    @param onaccept: callback invoked after the form has been accepted
    @param log: event-log message template
    """
    table_user = self.settings.table_user
    if not self.is_logged_in():
        redirect(self.settings.login_url)
    # Password can not be changed via the profile form
    passfield = self.settings.password_field
    self.settings.table_user[passfield].writable = False
    request = current.request
    session = current.session
    if next == DEFAULT:
        next = request.get_vars._next \
            or request.post_vars._next \
            or self.settings.profile_next
    if onvalidation == DEFAULT:
        onvalidation = self.settings.profile_onvalidation
    if onaccept == DEFAULT:
        onaccept = self.settings.profile_onaccept
    if log == DEFAULT:
        log = self.messages.profile_log
    labels, required = s3_mark_required(table_user)
    form = SQLFORM(
        table_user,
        self.user.id,
        fields = self.settings.profile_fields,
        labels = labels,
        hidden = dict(_next=next),
        showid = self.settings.showid,
        submit_button = self.messages.profile_save_button,
        delete_label = self.messages.delete_label,
        upload = self.settings.download_url,
        formstyle = self.settings.formstyle,
        separator=""
        )
    if form.accepts(request, session,
                    formname='profile',
                    onvalidation=onvalidation,hideerror=self.settings.hideerror):
        # Keep the in-session user record in sync with the DB
        self.user.update(table_user._filter_fields(form.vars))
        session.flash = self.messages.profile_updated
        if log:
            self.log_event(log % self.user)
        callback(onaccept,form)
        if not next:
            next = self.url(args=request.args)
        elif isinstance(next, (list, tuple)): ### fix issue with 2.6
            next = next[0]
        elif next and not next[0] == '/' and next[:4] != 'http':
            next = self.url(next.replace('[id]', str(form.vars.id)))
        redirect(next)
    return form
# -------------------------------------------------------------------------
def s3_lookup_org_role(self, organisation_id):
    """
    Lookup the Organisation Access Role from the ID of the Organisation

    @param organisation_id: the org_organisation record ID
    @returns: the owned_by_organisation role ID, or None if the
              organisation cannot be found (or no ID was given)
    """
    if not organisation_id:
        return None

    otable = current.s3db.org_organisation
    row = current.db(otable.id == organisation_id).select(
                        otable.owned_by_organisation).first()
    if row:
        return row.owned_by_organisation
    return None
# -------------------------------------------------------------------------
def s3_impersonate(self, user_id):
    """
    S3 framework function

    Designed to be used within tasks, which are run in a separate request
    & hence don't have access to current.auth

    @param user_id: auth.user.id
    @returns: None for anonymous (no user_id), False for an invalid
              user ID, otherwise the impersonated user record (Storage)
    """
    session = current.session
    db = current.db

    if not user_id:
        # Anonymous
        return None

    table_user = self.settings.table_user
    user = db(table_user.id == user_id).select(limitby=(0, 1)).first()
    if not user:
        # Invalid user ID
        return False

    # Collect the user's role memberships into the session
    roles = []
    table_membership = self.settings.table_membership
    memberships = db(table_membership.user_id == user.id).select(
                        table_membership.group_id)
    roles = [m.group_id for m in memberships]
    # Everybody (including impersonated users) gets the Anonymous role
    if session.s3.system_roles.ANONYMOUS:
        roles.append(session.s3.system_roles.ANONYMOUS)
    session.s3.roles = roles

    # Set the language from the Profile
    language = user.language
    current.T.force(language)
    current.session.s3.language = language

    user = Storage(table_user._filter_fields(user, id=True))

    # Use this user
    self.user = user
    return user
# -------------------------------------------------------------------------
def s3_register(self, form):
    """
    S3 framework function

    Designed to be used as an onaccept callback for register()

    Whenever someone registers, it:
        - adds them to the 'Authenticated' role
        - adds their name to the Person Registry
        - creates their profile picture
        - creates an HRM record
        - adds them to the Org_x Access role

    @param form: the registration form (form.vars must contain the
                 new user record, including its "id")
    @returns: the person record ID (for init scripts), or None if
              the form carries no user ID
    """
    db = current.db
    manager = current.manager
    s3db = current.s3db

    vars = form.vars
    user_id = vars.id
    if not user_id:
        return None

    # Add to 'Authenticated' role
    authenticated = self.id_group("Authenticated")
    self.add_membership(authenticated, user_id)

    # Link to organisation, lookup org role
    organisation_id = self.s3_link_to_organisation(vars)
    if organisation_id:
        owned_by_organisation = self.s3_lookup_org_role(organisation_id)
    else:
        owned_by_organisation = None

    # Add to Person Registry and Email/Mobile to pr_contact
    person_id = self.s3_link_to_person(vars, # user
                                       owned_by_organisation)

    # Store an uploaded profile picture (if any) as pr_image
    if "image" in vars:
        if hasattr(vars.image, "file"):
            source_file = vars.image.file
            original_filename = vars.image.filename
            ptable = s3db.pr_person
            query = (ptable.id == person_id)
            pe_id = db(query).select(ptable.pe_id,
                                     limitby=(0, 1)).first()
            if pe_id:
                pe_id = pe_id.pe_id
                itable = s3db.pr_image
                field = itable.image
                newfilename = field.store(source_file, original_filename, field.uploadfolder)
                url = URL(c="default", f="download", args=newfilename)
                fields = dict(pe_id=pe_id,
                              profile=True,
                              image=newfilename,
                              url = url,
                              title=current.T("Profile Picture"))
                if isinstance(field.uploadfield, str):
                    fields[field.uploadfield] = source_file.read()
                itable.insert(**fields)

    # htable is None if the HRM module is disabled
    htable = s3db.table("hrm_human_resource")
    if htable and organisation_id:
        # Create an HRM entry, if one doesn't already exist
        query = (htable.person_id == person_id) & \
                (htable.organisation_id == organisation_id)
        row = db(query).select(htable.id, limitby=(0, 1)).first()
        if not row:
            if current.deployment_settings.get_hrm_show_staff():
                type = 1 # Staff
            else:
                type = 2 # Volunteer
            id = htable.insert(person_id=person_id,
                               organisation_id=organisation_id,
                               type=type,
                               owned_by_user=user_id,
                               owned_by_organisation=owned_by_organisation)
            record = Storage(id=id)
            manager.model.update_super(htable, record)

    if owned_by_organisation:
        # Add user to the Org Access Role (if not already a member)
        table = self.settings.table_membership
        query = (table.deleted != True) & \
                (table.user_id == user_id) & \
                (table.group_id == owned_by_organisation)
        if not db(query).select(table.id,
                                limitby=(0, 1)).first():
            table.insert(user_id=user_id,
                         group_id=owned_by_organisation)

    # Return person_id for init scripts
    return person_id
# -------------------------------------------------------------------------
def s3_link_to_organisation(self, user):
    """
    Link a user account to an organisation

    If the user record carries no organisation_id but does carry an
    organisation_name, a new org_organisation is created on-the-fly
    and the user record is updated to point at it.

    @param user: the user account record (= form.vars in s3_register);
                 must contain "id", may contain "organisation_id",
                 "organisation_name" and "organisation_acronym"
    @returns: the org_organisation record ID, or None if no
              organisation could be determined
    """
    db = current.db
    s3db = current.s3db
    manager = current.manager

    # Fix: user_id must be resolved before the on-the-fly organisation
    # branch below (previously it was referenced before assignment,
    # and the user table was referenced via an undefined name)
    user_id = user.id

    organisation_id = user.organisation_id
    if not organisation_id:
        otable = s3db.org_organisation
        name = user.get("organisation_name", None)
        acronym = user.get("organisation_acronym", None)
        if name:
            # Create new organisation
            organisation_id = otable.insert(name=name,
                                            acronym=acronym)
            # Update the super-entities
            record = Storage(id=organisation_id)
            manager.model.update_super(otable, record)
            # Set record ownership
            self.s3_set_record_owner(otable, organisation_id)
            user.organisation_id = organisation_id
            # Update user record
            utable = self.settings.table_user
            db(utable.id == user_id).update(organisation_id=organisation_id)
    if not organisation_id:
        return None

    # Create link (if it doesn't exist)
    ltable = s3db.org_organisation_user
    if ltable:
        query = (ltable.user_id == user_id) & \
                (ltable.organisation_id == organisation_id)
        row = db(query).select(ltable.id, limitby=(0, 1)).first()
        if not row:
            ltable.insert(user_id=user_id,
                          organisation_id=organisation_id)

    return organisation_id
# -------------------------------------------------------------------------
def s3_link_to_person(self,
                      user=None,
                      owned_by_organisation=None):
    """
    Links user accounts to person registry entries

    @param user: the user record (or a list of user records); if None,
                 ALL user accounts are processed
    @param owned_by_organisation: the role of the owner organisation
    @returns: a single person ID if exactly one account was linked,
              otherwise the list of person IDs

    Policy for linking to pre-existing person records:

    If a person record with exactly the same first name and
    last name exists, which has a contact information record
    with exactly the same email address as used in the user
    account, and is not linked to another user account, then
    this person record will be linked to this user account.

    Otherwise, a new person record is created, and a new email
    contact record with the email address from the user record
    is registered for that person.
    """
    db = current.db
    s3db = current.s3db

    utable = self.settings.table_user
    ptable = s3db.pr_person
    ctable = s3db.pr_contact
    atable = s3db.pr_address
    etable = s3db.pr_pentity
    ttable = s3db.sit_trackable
    gtable = s3db.gis_config
    ltable = s3db.pr_person_user

    # Left joins to find any person already linked to each account
    left = [ltable.on(ltable.user_id == utable.id),
            ptable.on(ptable.pe_id == ltable.pe_id)]

    if user is not None:
        if not isinstance(user, (list, tuple)):
            user = [user]
        user_ids = [u.id for u in user]
        query = (utable.id.belongs(user_ids))
    else:
        query = (utable.id != None)

    users = db(query).select(utable.id,
                             utable.first_name,
                             utable.last_name,
                             utable.email,
                             ltable.pe_id,
                             ptable.id,
                             left=left, distinct=True)

    utn = utable._tablename

    person_ids = [] # Collect the person IDs

    for u in users:

        # Already linked to a person record => keep that link
        person = u.pr_person
        if person.id is not None:
            person_ids.append(person.id)
            continue

        user = u[utn]

        owner = Storage(owned_by_user=user.id,
                        owned_by_organisation=owned_by_organisation)

        if "email" in user:

            # Try to find a matching person record
            first_name = user.first_name
            last_name = user.last_name
            email = user.email.lower()
            query = (ptable.first_name == first_name) & \
                    (ptable.last_name == last_name) & \
                    (ctable.pe_id == ptable.pe_id) & \
                    (ctable.contact_method == "EMAIL") & \
                    (ctable.value.lower() == email)
            person = db(query).select(ptable.id,
                                      ptable.pe_id,
                                      limitby=(0, 1)).first()

            if person and \
               not db(ltable.pe_id == person.pe_id).count():
                # Match found, and it isn't linked to another user account
                # Insert a link
                ltable.insert(user_id=user.id, pe_id=person.pe_id)
                # Assign ownership of the Person record
                person.update_record(**owner)
                # Assign ownership of the Contact record(s)
                query = (ctable.pe_id == person.pe_id)
                db(query).update(**owner)
                # Assign ownership of the Address record(s)
                query = (atable.pe_id == person.pe_id)
                db(query).update(**owner)
                # Assign ownership of the Config record(s)
                query = (gtable.pe_id == person.pe_id)
                db(query).update(**owner)
                # HR records
                self.s3_register_staff(user.id, person.id)
                # Set pe_id if this is the current user
                if self.user and self.user.id == user.id:
                    self.user.pe_id = person.pe_id
                person_ids.append(person.id)
                continue

            # No suitable match => create a new person record
            # Create a PE
            pe_id = etable.insert(instance_type="pr_person",
                                  deleted=False)
            # Create a TE
            track_id = ttable.insert(instance_type="pr_person",
                                     deleted=False)
            if pe_id:
                # Create a new person record
                if current.request.vars.get("opt_in", None):
                    opt_in = current.deployment_settings.get_auth_opt_in_team_list()
                else:
                    opt_in = ""
                new_id = ptable.insert(pe_id = pe_id,
                                       track_id = track_id,
                                       first_name = first_name,
                                       last_name = last_name,
                                       opt_in = opt_in,
                                       modified_by = user.id,
                                       **owner)
                if new_id:
                    # Insert a link
                    ltable.insert(user_id=user.id, pe_id=pe_id)
                    # Register the new person UUID in the PE and TE
                    person_uuid = ptable[new_id].uuid
                    db(etable.id == pe_id).update(uuid=person_uuid)
                    db(ttable.id == track_id).update(uuid=person_uuid)
                    # Add the email to pr_contact
                    ctable.insert(pe_id = pe_id,
                                  contact_method = "EMAIL",
                                  priority = 1,
                                  value = email,
                                  **owner)
                    # Add the mobile to pr_contact
                    mobile = current.request.vars.get("mobile", None)
                    if mobile:
                        ctable.insert(
                                pe_id = pe_id,
                                contact_method = "SMS",
                                priority = 2,
                                value = mobile,
                                **owner)
                    person_ids.append(new_id)

                    # Add the user to each team if they have chosen to opt-in
                    g_table = s3db["pr_group"]
                    gm_table = s3db["pr_group_membership"]
                    for team in opt_in:
                        query = (g_table.name == team)
                        team_rec = db(query).select(g_table.id, limitby=(0, 1)).first()
                        # if the team doesn't exist then add it
                        if team_rec == None:
                            team_id = g_table.insert(name = team, group_type = 5)
                        else:
                            team_id = team_rec.id
                        gm_table.insert(group_id = team_id,
                                        person_id = new_id)

                # Set pe_id if this is the current user
                if self.user and self.user.id == user.id:
                    self.user.pe_id = pe_id

    if len(person_ids) == 1:
        return person_ids[0]
    else:
        return person_ids
# -------------------------------------------------------------------------
def s3_approver(self, user):
    """
    Returns the Approver for a new Registration &
    the organisation_id field

    @param: user - the user record (form.vars when done direct);
                   must contain "email", may contain "organisation_id"
    @returns: tuple (approver, organisation_id); approver falls back
              to the deployment-wide mail approver
    """
    db = current.db
    s3db = current.s3db
    deployment_settings = current.deployment_settings

    # Default Approver
    approver = deployment_settings.get_mail_approver()
    organisation_id = None
    # Check for Domain: Whitelist or specific Approver
    table = s3db.auth_organisation
    address, domain = user.email.split("@", 1)
    query = (table.domain == domain)
    record = db(query).select(table.organisation_id,
                              table.approver,
                              limitby=(0, 1)).first()
    if record:
        organisation_id = record.organisation_id
        approver = record.approver

    elif deployment_settings.get_auth_registration_requests_organisation():
        # Check for an Organization-specific Approver
        organisation_id = user.get("organisation_id",
                                   None)
        if organisation_id:
            query = (table.organisation_id == organisation_id)
            record = db(query).select(table.approver,
                                      limitby=(0, 1)).first()
            if record and record.approver:
                approver = record.approver

    return approver, organisation_id
# -------------------------------------------------------------------------
def verify_email(self,
                 next=DEFAULT,
                 onaccept=DEFAULT,
                 log=DEFAULT):
    """
    Action for the user to verify their registration email
    (the verification key arrives as the last URL argument)

    .. method:: Auth.verify_email([next=DEFAULT [, onvalidation=DEFAULT
        [, onaccept=DEFAULT [, log=DEFAULT]]]])
    """
    db = current.db
    settings = self.settings
    messages = self.messages
    deployment_settings = current.deployment_settings

    # The registration key is the last URL argument
    key = current.request.args[-1]
    table_user = settings.table_user
    user = db(table_user.registration_key == key).select().first()
    if not user:
        # Unknown/stale key => bail out
        redirect(settings.verify_email_next)

    # S3: Lookup the Approver
    approver, organisation_id = self.s3_approver(user)

    if settings.registration_requires_approval and approver:
        # Verified, but still needs Admin approval
        user.update_record(registration_key = "pending")
        current.session.flash = messages.registration_pending_approval
    else:
        # Fully verified => activate the account
        user.update_record(registration_key = "")
        current.session.flash = messages.email_verified

    if log == DEFAULT:
        log = messages.verify_email_log
    if next == DEFAULT:
        next = settings.verify_email_next
    if onaccept == DEFAULT:
        onaccept = settings.verify_email_onaccept
    if log:
        self.log_event(log % user)

    if approver:
        user.approver = approver
    callback(onaccept, user)

    redirect(next)
# -------------------------------------------------------------------------
def s3_verify_email_onaccept(self, form):
    """
    Sends a message to the approver to notify them if a user needs approval
    If deployment_settings.auth.always_notify_approver = True,
    send them notification regardless

    @param form: record with first_name/last_name/email/approver and
                 registration_key (e.g. form.vars or the user record)
    @returns: the mailer's send result (truthy on success)
    """
    if form.registration_key == "": # User Approved
        if not current.deployment_settings.get_auth_always_notify_approver():
            return
        subject = current.T("%(system_name)s - New User Registered") % \
            {"system_name": current.deployment_settings.get_system_name()}
        message = self.messages.new_user % dict(first_name = form.first_name,
                                                last_name = form.last_name,
                                                email = form.email)
    else:
        subject = current.T("%(system_name)s - New User Registration Approval Pending") % \
            {"system_name": current.deployment_settings.get_system_name()}
        message = self.messages.approve_user % \
            dict(first_name=form.first_name,
                 last_name=form.last_name,
                 email=form.email)

    result = self.settings.mailer.send(to=form.approver,
                                       subject=subject,
                                       message=message)

    return result
# -------------------------------------------------------------------------
def s3_register_staff(self, user_id, person_id):
    """
    Take ownership of the HR records of the person record,
    and add user to the Org Access role.

    To be called by s3_link_to_person in case a newly registered
    user record gets linked to a prior existing person record.

    @param user_id: the user record ID
    @param person_id: the person record ID
    """
    db = current.db
    s3db = current.s3db
    manager = current.manager

    htable = s3db.table("hrm_human_resource")
    if htable is None:
        # HR module disabled: skip
        return
    rtable = self.settings.table_group
    mtable = self.settings.table_membership
    utable = self.settings.table_user

    # User owns their own HRM records
    query = (htable.person_id == person_id)
    db(query).update(owned_by_user=user_id)

    # Collect Org roles from the person's active (status==1) HR records
    query &= ((htable.status == 1) &
              (htable.deleted != True))
    rows = db(query).select(htable.owned_by_organisation)
    org_roles = []
    for row in rows:
        org_role = row.owned_by_organisation
        if org_role and org_role not in org_roles:
            # Only add memberships which don't exist yet
            query = (mtable.deleted != True) & \
                    (mtable.user_id == user_id) & \
                    (mtable.group_id == org_role)
            if not db(query).select(limitby=(0, 1)).first():
                org_roles.append(dict(user_id=user_id,
                                      group_id=org_role))
    if org_roles:
        mtable.bulk_insert(org_roles)
# -------------------------------------------------------------------------
def s3_logged_in(self):
    """
    Check whether the user is currently logged-in
    - tries HTTP Basic authentication if there is no session login

    @returns: True if logged-in (or auth override is active),
              else False
    """
    if self.override:
        return True

    if not self.is_logged_in():
        # No session login => attempt HTTP Basic authentication
        basic = self.basic()
        try:
            # Recent web2py: basic() returns a 3-tuple,
            # the last element is the success flag
            return basic[2]
        except TypeError:
            # old web2py: basic() returns the flag directly
            return basic
        except:
            # Best-effort: any other failure counts as not logged-in
            return False

    return True
# -------------------------------------------------------------------------
# Role Management
# -------------------------------------------------------------------------
def get_system_roles(self):
    """
    Get the IDs of the session roles by their UIDs, and store them
    into the current session. To be run once per session, as these
    IDs should never change.

    @returns: Storage mapping role UID -> role record ID (ID is None
              for each UID when the group table is not available)

    Caution: do NOT cache the result, otherwise a newly installed
    system would be completely open during the caching period!
    """
    session = current.session
    try:
        # Already resolved for this session?
        if session.s3.system_roles:
            return session.s3.system_roles
    except:
        pass

    db = current.db
    rtable = self.settings.table_group
    if rtable is not None:
        system_roles = self.S3_SYSTEM_ROLES
        query = (rtable.deleted != True) & \
                 rtable.uuid.belongs(system_roles.values())
        rows = db(query).select(rtable.id, rtable.uuid)
        sr = Storage([(role.uuid, role.id) for role in rows])
    else:
        # Group table not available (yet) => IDs unknown
        sr = Storage([(uid, None) for uid in self.S3_SYSTEM_ROLES])

    session.s3.system_roles = sr
    return sr
# -------------------------------------------------------------------------
def s3_create_role(self, role, description=None, *acls, **args):
    """
    Back-end method to create roles with ACLs

    @param role: display name for the role
    @param description: description of the role (optional)
    @param acls: list of initial ACLs to assign to this role
    @param args: keyword arguments (see below)
    @keyword uid: a unique name for the role
    @keyword hidden: hide this role completely from the RoleManager
    @keyword system: role can be assigned, but neither modified nor
                     deleted in the RoleManager
    @keyword protected: role can be assigned and edited, but not
                        deleted in the RoleManager

    @returns: the role record ID
    """
    table = self.settings.table_group

    # Role flags, all default to False
    attributes = dict(role=role,
                      description=description,
                      hidden=args.get("hidden", False),
                      system=args.get("system", False),
                      protected=args.get("protected", False))

    # With a UID, re-use an existing role record if present;
    # without one, generate a fresh random UID
    uid = args.get("uid", None)
    if uid:
        record = current.db(table.uuid == uid).select(limitby=(0, 1)).first()
    else:
        record = None
        import uuid
        uid = uuid.uuid4()

    if record:
        # Un-delete and update the existing role
        role_id = record.id
        record.update_record(deleted=False, **attributes)
    else:
        role_id = table.insert(uuid=uid, **attributes)

    if role_id:
        # Apply the initial ACLs
        for acl in acls:
            self.s3_update_acl(role_id, **acl)

    return role_id
# -------------------------------------------------------------------------
def s3_delete_role(self, role_id):
    """
    Remove a role from the system.

    @param role_id: the ID or UID of the role

    @note: protected roles cannot be deleted with this function,
           need to reset the protected-flag first to override
    """
    db = current.db
    table = self.settings.table_group

    # Non-numeric strings are UIDs, everything else is a record ID
    if isinstance(role_id, str) and not role_id.isdigit():
        gquery = (table.uuid == role_id)
    else:
        gquery = (table.id == int(role_id))

    role = db(gquery).select(limitby=(0, 1)).first()
    if not role or role.protected:
        # Not found, or protected => do nothing
        return

    # Remove all memberships for this role
    mtable = self.settings.table_membership
    db(mtable.group_id == role.id).update(deleted=True)

    # Remove all ACLs for this role
    ptable = self.permission.table
    db(ptable.group_id == role.id).update(deleted=True)

    # Remove the role itself
    db(gquery).update(role=None, deleted=True)
# -------------------------------------------------------------------------
def resolve_role_ids(self, roles):
"""
Resolve role UIDs
@param roles: list of role IDs or UIDs (or mixed)
"""
db = current.db
if not isinstance(roles, (list, tuple)):
roles = [roles]
role_ids = []
role_uids = []
for role_id in roles:
if isinstance(role_id, str) and not role_id.isdigit():
role_uids.append(role_id)
else:
_id = int(role_id)
if _id not in role_ids:
role_ids.append(_id)
if role_uids:
rtable = self.settings.table_group
query = (rtable.deleted != True) & \
(rtable.uuid.belongs(role_uids))
rows = db(query).select(rtable.id)
role_ids += [r.id for r in rows if r.id not in role_ids]
return role_ids
# -------------------------------------------------------------------------
def s3_assign_role(self, user_id, role_id):
"""
Assigns a role to a user
@param user_id: the record ID of the user account
@param role_id: the record ID(s)/UID(s) of the role
@note: strings or lists of strings are assumed to be
role UIDs
"""
db = current.db
rtable = self.settings.table_group
mtable = self.settings.table_membership
query = (rtable.deleted != True)
if isinstance(role_id, (list, tuple)):
if isinstance(role_id[0], str):
query &= (rtable.uuid.belongs(role_id))
else:
roles = role_id
elif isinstance(role_id, str):
query &= (rtable.uuid == role_id)
else:
roles = [role_id]
if query is not None:
roles = db(query).select(rtable.id)
roles = [r.id for r in roles]
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(roles))
assigned = db(query).select(mtable.group_id)
assigned_roles = [r.group_id for r in assigned]
for role in roles:
if role not in assigned_roles:
mtable.insert(user_id=user_id, group_id=role)
# -------------------------------------------------------------------------
def s3_retract_role(self, user_id, role_id):
"""
Removes a role assignment from a user account
@param user_id: the record ID of the user account
@param role_id: the record ID(s)/UID(s) of the role
@note: strings or lists of strings are assumed to be
role UIDs
"""
if not role_id:
return
db = current.db
rtable = self.settings.table_group
mtable = self.settings.table_membership
query = (rtable.deleted != True)
if isinstance(role_id, (list, tuple)):
if isinstance(role_id[0], str):
query &= (rtable.uuid.belongs(role_id))
else:
roles = role_id
elif isinstance(role_id, str):
query &= (rtable.uuid == role_id)
else:
roles = [role_id]
if query is not None:
roles = db(query).select(rtable.id)
roles = [r.id for r in roles]
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(roles))
db(query).update(deleted=True)
# -------------------------------------------------------------------------
    def s3_has_role(self, role):
        """
        Check whether the currently logged-in user has a role
        @param role: the record ID or UID of the role
        @returns: True if the user has the role (Administrators
                  implicitly have all roles), else False
        """
        if self.override:
            # Permission checks globally disabled (e.g. during prepop)
            return True
        db = current.db
        session = current.session
        if not session.s3:
            return False
        # Trigger HTTP basic auth
        self.s3_logged_in()
        roles = session.s3.roles
        if not roles:
            return False
        system_roles = session.s3.system_roles
        if system_roles and system_roles.ADMIN in roles:
            # Administrators have all roles
            return True
        if isinstance(role, str):
            if role.isdigit():
                role = int(role)
            else:
                # Resolve the role UID into a role ID
                rtable = self.settings.table_group
                query = (rtable.deleted != True) & \
                        (rtable.uuid == role)
                row = db(query).select(rtable.id, limitby=(0, 1)).first()
                if row:
                    role = row.id
                else:
                    return False
        return role in session.s3.roles
# -------------------------------------------------------------------------
# ACL management
# -------------------------------------------------------------------------
def s3_update_acls(self, role, *acls):
"""
Wrapper for s3_update_acl to allow batch updating
"""
for acl in acls:
self.s3_update_acl(role, **acl)
# -------------------------------------------------------------------------
def s3_update_acl(self, role,
c=None, f=None, t=None, oacl=None, uacl=None,
organisation=None):
"""
Back-end method to update an ACL
"""
ALL = "all"
all_organisations = organisation == ALL
if all_organisations:
organisation = None
table = self.permission.table
if not table:
# ACLs not relevant to this security policy
return None
if c is None and f is None and t is None:
return None
if t is not None:
c = f = None
if uacl is None:
uacl = self.permission.NONE
if oacl is None:
oacl = uacl
if role:
query = ((table.group_id == role) & \
(table.controller == c) & \
(table.function == f) & \
(table.tablename == t))
record = current.db(query).select(table.id, limitby=(0, 1)).first()
acl = dict(deleted=False,
group_id=role,
controller=c,
function=f,
tablename=t,
oacl=oacl,
uacl=uacl,
all_organisations=all_organisations,
organisation=organisation)
if record:
success = record.update_record(**acl)
else:
success = table.insert(**acl)
return success
# -------------------------------------------------------------------------
# Utilities
# -------------------------------------------------------------------------
def s3_group_members(self, group_id):
"""
Get a list of members of a group
@param group_id: the group record ID
@returns: a list of the user_ids for members of a group
"""
membership = self.settings.table_membership
query = (membership.deleted != True) & \
(membership.group_id == group_id)
members = current.db(query).select(membership.user_id)
return [member.user_id for member in members]
# -------------------------------------------------------------------------
def s3_user_pe_id(self, user_id):
"""
Get the person pe_id for a user ID
@param user_id: the user ID
"""
db = current.db
s3db = current.s3db
ltable = s3db.pr_person_user
query = (ltable.user_id == user_id)
row = db(query).select(ltable.pe_id, limitby=(0, 1)).first()
if row:
return row.pe_id
return None
# -------------------------------------------------------------------------
    def s3_logged_in_person(self):
        """
        Get the person record ID for the current logged-in user
        @returns: the pr_person record ID, or None if not logged in
                  or no person record is linked to the account
        """
        db = current.db
        s3db = current.s3db
        ptable = s3db.pr_person
        if self.s3_logged_in():
            try:
                query = (ptable.pe_id == self.user.pe_id)
            except AttributeError:
                # Prepop (user has no pe_id attribute yet)
                pass
            else:
                record = db(query).select(ptable.id,
                                          limitby=(0, 1)).first()
                if record:
                    return record.id
        return None
# -------------------------------------------------------------------------
    def s3_logged_in_human_resource(self):
        """
        Get the human resource record ID for the current
        logged-in user
        @returns: the hrm_human_resource record ID (the most
                  recently modified record if there are several),
                  or None
        """
        db = current.db
        s3db = current.s3db
        ptable = s3db.pr_person
        htable = s3db.hrm_human_resource
        if self.s3_logged_in():
            try:
                query = (htable.person_id == ptable.id) & \
                        (ptable.pe_id == self.user.pe_id)
            except AttributeError:
                # Prepop (user has no pe_id attribute yet)
                pass
            else:
                # Prefer the most recently modified HR record
                record = db(query).select(htable.id,
                                          orderby =~htable.modified_on,
                                          limitby=(0, 1)).first()
                if record:
                    return record.id
        return None
# -------------------------------------------------------------------------
    def s3_has_permission(self, method, table, record_id = 0):
        """
        S3 framework function to define whether a user can access a record
        in manner "method". Designed to be called from the RESTlike
        controller.
        @param method: the access method ("read", "create", "update",
                       "delete", ...)
        @param table: the table or tablename
        @param record_id: the record ID (0 = no particular record)
        @returns: True if access is authorised, else False
        """
        if self.override:
            # Permission checks globally disabled (e.g. during prepop)
            return True
        db = current.db
        session = current.session
        if not hasattr(table, "_tablename"):
            # Resolve a tablename into the actual table
            s3db = current.s3db
            table = s3db[table]
        if session.s3.security_policy == 1:
            # Simple policy
            # Anonymous users can Read.
            if method == "read":
                authorised = True
            else:
                # Authentication required for Create/Update/Delete.
                authorised = self.s3_logged_in()
        elif session.s3.security_policy == 2:
            # Editor policy
            # Anonymous users can Read.
            if method == "read":
                authorised = True
            elif method == "create":
                # Authentication required for Create.
                authorised = self.s3_logged_in()
            elif record_id == 0 and method == "update":
                # Authenticated users can update at least some records
                authorised = self.s3_logged_in()
            else:
                # Editor role required for Update/Delete.
                authorised = self.s3_has_role("Editor")
                if not authorised and self.user and "owned_by_user" in table:
                    # Creator of Record is allowed to Edit
                    query = (table.id == record_id)
                    record = db(query).select(table.owned_by_user,
                                              limitby=(0, 1)).first()
                    if record and self.user.id == record.owned_by_user:
                        authorised = True
        elif session.s3.security_policy == 3:
            # Controller ACLs
            self.permission.use_cacls = True
            self.permission.use_facls = False
            self.permission.use_tacls = False
            authorised = self.permission.has_permission(table,
                                                        record=record_id,
                                                        method=method)
        elif session.s3.security_policy == 4:
            # Controller+Function ACLs
            self.permission.use_cacls = True
            self.permission.use_facls = True
            self.permission.use_tacls = False
            authorised = self.permission.has_permission(table,
                                                        record=record_id,
                                                        method=method)
        elif session.s3.security_policy >= 5:
            # Controller+Function+Table ACLs
            self.permission.use_cacls = True
            self.permission.use_facls = True
            self.permission.use_tacls = True
            authorised = self.permission.has_permission(table,
                                                        record=record_id,
                                                        method=method)
        else:
            # Full policy
            if self.s3_logged_in():
                # Administrators are always authorised
                if self.s3_has_role(1):
                    authorised = True
                else:
                    # Require records in auth_permission to specify access
                    # (default Web2Py-style)
                    authorised = self.has_permission(method, table, record_id)
            else:
                # No access for anonymous
                authorised = False
        return authorised
# -------------------------------------------------------------------------
def s3_accessible_query(self, method, table):
"""
Returns a query with all accessible records for the currently
logged-in user
@note: This method does not work on GAE because it uses JOIN and IN
"""
if self.override:
return table.id > 0
db = current.db
session = current.session
T = current.T
policy = session.s3.security_policy
if policy == 1:
# "simple" security policy: show all records
return table.id > 0
elif policy == 2:
# "editor" security policy: show all records
return table.id > 0
elif policy in (3, 4, 5, 6):
# ACLs: use S3Permission method
query = self.permission.accessible_query(table, method)
return query
# "Full" security policy
if self.s3_has_role(1):
# Administrators can see all data
return table.id > 0
# If there is access to the entire table then show all records
try:
user_id = self.user.id
except:
user_id = 0
if self.has_permission(method, table, 0, user_id):
return table.id > 0
# Filter Records to show only those to which the user has access
session.warning = T("Only showing accessible records!")
membership = self.settings.table_membership
permission = self.settings.table_permission
return table.id.belongs(db(membership.user_id == user_id)\
(membership.group_id == permission.group_id)\
(permission.name == method)\
(permission.table_name == table)\
._select(permission.record_id))
# -------------------------------------------------------------------------
def s3_has_membership(self, group_id=None, user_id=None, role=None):
"""
Checks if user is member of group_id or role
Extends Web2Py's requires_membership() to add new functionality:
- Custom Flash style
- Uses s3_has_role()
"""
if self.override:
return True
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
if self.s3_has_role(group_id):
r = True
else:
r = False
log = self.messages.has_membership_log
if log:
if not user_id and self.user:
user_id = self.user.id
self.log_event(log % dict(user_id=user_id,
group_id=group_id, check=r))
return r
# Override original method
has_membership = s3_has_membership
# -------------------------------------------------------------------------
    def s3_requires_membership(self, role):
        """
        Decorator that prevents access to action if not logged in or
        if user logged in is not a member of group_id. If role is
        provided instead of group_id then the group_id is calculated.
        Extends Web2Py's requires_membership() to add new functionality:
            - Custom Flash style
            - Uses s3_has_role()
            - Administrators (id=1) are deemed to have all roles
        @param role: the role (or group ID) required for access
        @returns: the decorator
        """
        def decorator(action):
            def f(*a, **b):
                if self.override:
                    # Permission checks globally disabled
                    return action(*a, **b)
                if not self.s3_logged_in():
                    # Not logged in => redirect to login page,
                    # preserving the current URL as _next
                    request = current.request
                    next = URL(args=request.args, vars=request.get_vars)
                    import urllib
                    redirect("%s?_next=%s" % (self.settings.login_url,
                                              urllib.quote(next)))
                if not self.s3_has_role(role) and not self.s3_has_role(1):
                    # Has neither the required role nor Administrator
                    current.session.error = self.messages.access_denied
                    next = self.settings.on_failed_authorization
                    redirect(next)
                return action(*a, **b)
            f.__doc__ = action.__doc__
            return f
        return decorator
    # Override original method
    requires_membership = s3_requires_membership
# -------------------------------------------------------------------------
def s3_make_session_owner(self, table, record_id):
"""
Makes the current session owner of a record
@param table: the table or table name
@param record_id: the record ID
"""
if hasattr(table, "_tablename"):
table = table._tablename
if not self.user:
session = current.session
if "owned_records" not in session:
session.owned_records = Storage()
records = session.owned_records.get(table, [])
record_id = str(record_id)
if record_id not in records:
records.append(record_id)
session.owned_records[table] = records
# -------------------------------------------------------------------------
def s3_session_owns(self, table, record_id):
"""
Checks whether the current session owns a record
@param table: the table or table name
@param record_id: the record ID
"""
if hasattr(table, "_tablename"):
table = table._tablename
if not self.user:
try:
records = current.session.owned_records.get(table, [])
except:
records = []
if str(record_id) in records:
return True
return False
# -------------------------------------------------------------------------
    def s3_set_record_owner(self, table, record):
        """
        Set the owner organisation for a record
        @param table: the table or table name
        @param record: the record (as Row) or record ID
        @note: for org_organisation records this also creates (or
               restores) the organisation's own staff role on demand
        """
        db = current.db
        s3db = current.s3db
        manager = current.manager
        site_types = self.org_site_types
        OWNED_BY_ORG = "owned_by_organisation"
        ORG_ID = "organisation_id"
        ORG_PREFIX = "Org_%s"
        ORG_TABLENAME = "org_organisation"
        NAME = "name"
        org_table = s3db[ORG_TABLENAME]
        grp_table = self.settings.table_group
        # Get the table
        if isinstance(table, str):
            table = s3db[table]
        tablename = table._tablename
        _id = table._id.name
        # Which fields are available?
        fields = [table._id.name,
                  NAME,
                  ORG_ID,
                  OWNED_BY_ORG]
        fields = [table[f] for f in fields if f in table.fields]
        # Get the record
        if not isinstance(record, Row):
            # Record ID given => load the record
            record_id = record
            record = db(table._id == record_id).select(limitby=(0, 1),
                                                       *fields).first()
        else:
            if table._id.name in record:
                record_id = record[table._id.name]
            else:
                record_id = None
            # Reload the record if any relevant field is missing
            missing = [f for f in fields if f not in record]
            if missing:
                if record_id:
                    query = table._id == record_id
                    record = db(query).select(limitby=(0, 1),
                                              *fields).first()
                else:
                    record = None
        if not record:
            # Raise an exception here?
            return
        # Get the organisation ID
        org_role = None
        if tablename == ORG_TABLENAME:
            organisation_id = record[_id]
            if OWNED_BY_ORG in record:
                org_role = record[OWNED_BY_ORG]
            if not org_role:
                # Create a new org_role (staff role of this organisation)
                uuid = ORG_PREFIX % organisation_id
                if NAME in table:
                    name = record[NAME]
                else:
                    name = uuid
                role = Storage(uuid=uuid,
                               deleted=False,
                               hidden=False,
                               system=True,
                               protected=True,
                               role="%s (Organisation)" % name,
                               description="All Staff of Organization %s" % name)
                # Restore an existing role with this UID/name, if any
                query = (grp_table.uuid == role.uuid) | \
                        (grp_table.role == role.role)
                record = db(query).select(grp_table.id,
                                          limitby=(0, 1)).first()
                if not record:
                    org_role = grp_table.insert(**role)
                else:
                    record.update_record(**role)
                    org_role = record.id
        elif ORG_ID in table:
            organisation_id = record[ORG_ID]
            # Get the org_role from the organisation
            if organisation_id:
                query = org_table.id == organisation_id
                organisation = db(query).select(org_table[OWNED_BY_ORG],
                                                limitby=(0, 1)).first()
                if organisation:
                    org_role = organisation[OWNED_BY_ORG]
        # Update the record as necessary
        data = Storage()
        if org_role and OWNED_BY_ORG in table:
            data[OWNED_BY_ORG] = org_role
        if data and hasattr(record, "update_record"):
            record.update_record(**data)
        elif data and record_id:
            db(table._id == record_id).update(**data)
        return
# -------------------------------------------------------------------------
def s3_send_welcome_email(self, user):
"""
Send a welcome mail to newly-registered users
- especially suitable for users from Facebook/Google who don't
verify their emails
"""
if "name" in user:
user["first_name"] = user["name"]
if "family_name" in user:
# Facebook
user["last_name"] = user["family_name"]
subject = self.messages.welcome_email_subject
message = self.messages.welcome_email
self.settings.mailer.send(user["email"], subject=subject, message=message)
# =============================================================================
class S3Permission(object):
    """
        S3 Class to handle permissions
        @author: Dominic König <dominic@aidiq.com>
    """
    # Name of the permissions (ACL) table
    TABLENAME = "s3_permission"
    # Permission bits (bitmap flags)
    CREATE = 0x0001
    READ = 0x0002
    UPDATE = 0x0004
    DELETE = 0x0008
    ALL = CREATE | READ | UPDATE | DELETE
    NONE = 0x0000 # must be 0!
    PERMISSION_OPTS = OrderedDict([
        #(NONE, "NONE"),
        #(READ, "READ"),
        #(CREATE|UPDATE|DELETE, "WRITE"),
        [CREATE, "CREATE"],
        [READ, "READ"],
        [UPDATE, "UPDATE"],
        [DELETE, "DELETE"]])
    # Method string <-> required permission
    METHODS = Storage({
        "create": CREATE,
        "import": CREATE,
        "read": READ,
        "report": READ,
        "search": READ,
        "update": UPDATE,
        "delete": DELETE})
    # Policy helpers:
    # combine a list of (oacl, uacl) tuples by bitwise OR
    # (= grant a permission if any role grants it)
    most_permissive = lambda self, acl: \
                             reduce(lambda x, y: (x[0]|y[0], x[1]|y[1]),
                                    acl, (self.NONE, self.NONE))
    # combine a list of (oacl, uacl) tuples by bitwise AND
    # (= grant a permission only if all ACLs grant it)
    most_restrictive = lambda self, acl: \
                              reduce(lambda x, y: (x[0]&y[0], x[1]&y[1]),
                                     acl, (self.ALL, self.ALL))
# -------------------------------------------------------------------------
    def __init__(self, auth, tablename=None):
        """
        Constructor, invoked by AuthS3.__init__
        @param auth: the AuthS3 instance
        @param tablename: the name for the permissions table
                          (defaults to self.TABLENAME)
        """
        # Instantiated once per request, but before Auth tables
        # are defined and authentication is checked, thus no use
        # to check permissions in the constructor
        # Auth
        self.auth = auth
        # Deployment settings
        settings = current.deployment_settings
        self.policy = settings.get_security_policy()
        # Which level of granularity do we want?
        self.use_cacls = self.policy in (3, 4, 5, 6) # Controller ACLs
        self.use_facls = self.policy in (4, 5, 6) # Function ACLs
        self.use_tacls = self.policy in (5, 6) # Table ACLs
        self.org_roles = self.policy == 6 # OrgAuth
        self.modules = settings.modules
        # If a large number of roles in the system turnes into a bottleneck
        # in policy 6, then we could reduce the number of roles in
        # subsequent queries; this would though add another query (or even two
        # more queries) to the request, so the hypothetic performance gain
        # should first be confirmed by tests:
        #if self.policy == 6:
        #gtable = auth.settings.table_group
        #org_roles = current.db(gtable.uid.like("Org_%")).select(gtable.id)
        #self.org_roles = [r.id for r in org_roles]
        #else:
        #self.org_roles = []
        # Permissions table
        self.tablename = tablename or self.TABLENAME
        self.table = current.db.get(self.tablename, None)
        # Error messages
        T = current.T
        self.INSUFFICIENT_PRIVILEGES = T("Insufficient Privileges")
        self.AUTHENTICATION_REQUIRED = T("Authentication Required")
        # Request information
        request = current.request
        self.controller = request.controller
        self.function = request.function
        # Request format
        self.format = request.extension
        if "format" in request.get_vars:
            # Explicit ?format= variable overrides the URL extension
            ext = request.get_vars.format
            if isinstance(ext, list):
                ext = ext[-1]
            self.format = ext.lower() or self.format
        else:
            # Fall back to an extension in the URL args
            ext = [a for a in request.args if "." in a]
            if ext:
                self.format = ext[-1].rsplit(".", 1)[1].lower()
        if request.function == "ticket" and \
           request.controller == "admin":
            # Error tickets need an override
            self.format = "html"
        # Page permission cache
        self.page_acls = Storage()
        self.table_acls = Storage()
        # Pages which never require permission:
        # Make sure that any data access via these pages uses
        # accessible_query explicitly!
        self.unrestricted_pages = ("default/index",
                                   "default/user",
                                   "default/contact",
                                   "default/about")
        # Default landing pages
        _next = URL(args=request.args, vars=request.vars)
        self.homepage = URL(c="default", f="index")
        self.loginpage = URL(c="default", f="user", args="login",
                             vars=dict(_next=_next))
# -------------------------------------------------------------------------
    def define_table(self, migrate=True, fake_migrate=False):
        """
        Define the permissions table, invoked by AuthS3.define_tables()
        @param migrate: enable migration of the table
        @param fake_migrate: fake migration of the table
        """
        db = current.db
        table_group = self.auth.settings.table_group
        if table_group is None:
            table_group = "integer" # fallback (doesn't work with requires)
        if not self.table:
            self.table = db.define_table(self.tablename,
                            Field("group_id", table_group),
                            Field("controller", length=64),
                            Field("function", length=512),
                            Field("tablename", length=512),
                            Field("oacl", "integer", default=self.ALL),
                            Field("uacl", "integer", default=self.READ),
                            # Only apply to records owned by this
                            # organisation role (policy 6 only):
                            Field("all_organisations", "boolean",
                                  default=False),
                            Field("organisation",
                                  table_group,
                                  requires = IS_NULL_OR(IS_IN_DB(
                                                db, table_group.id))),
                            migrate=migrate,
                            fake_migrate=fake_migrate,
                            *(s3_uid()+s3_timestamp()+s3_deletion_status()))
# -------------------------------------------------------------------------
    def __call__(self,
                 c=None,
                 f=None,
                 table=None,
                 record=None):
        """
        Get the ACL for the current user for a path
        @param c: the controller name (falls back request.controller)
        @param f: the function name (falls back to request.function)
        @param table: the table
        @param record: the record ID (or the Row if already loaded)
        @returns: the effective ACL bitmap for the current user
        @note: if passing a Row, it must contain all available ownership
               fields (id, owned_by_user, owned_by_group), otherwise the
               record will be re-loaded by this function
        """
        _debug("auth.permission(c=%s, f=%s, table=%s, record=%s)" %
                   (c, f, table, record))
        t = self.table # Permissions table
        auth = self.auth
        sr = auth.get_system_roles()
        if record == 0:
            record = None
        # Get user roles, check logged_in to trigger HTTPBasicAuth
        if not auth.s3_logged_in():
            roles = [sr.ANONYMOUS]
        else:
            roles = [sr.AUTHENTICATED]
        if current.session.s3 is not None:
            roles = current.session.s3.roles or roles
        if not self.use_cacls:
            # Fall back to simple authorization
            _debug("Simple authorization")
            if auth.s3_logged_in():
                _debug("acl=%04x" % self.ALL)
                return self.ALL
            else:
                _debug("acl=%04x" % self.READ)
                return self.READ
        if sr.ADMIN in roles:
            _debug("Administrator, acl=%04x" % self.ALL)
            return self.ALL
        # Fall back to current request
        c = c or self.controller
        f = f or self.function
        # Do we need to check the owner role (i.e. table+record given)?
        is_owner = False
        require_org = None
        if table is not None and record is not None:
            owner_role, owner_user, owner_org = \
                self.get_owners(table, record)
            is_owner = self.is_owner(table, None,
                                     owner_role=owner_role,
                                     owner_user=owner_user,
                                     owner_org=owner_org)
            if self.policy == 6:
                require_org = owner_org
        # Get the applicable ACLs
        page_acl = self.page_acl(c=c, f=f,
                                 require_org=require_org)
        if table is None or not self.use_tacls:
            acl = page_acl
        else:
            if sr.EDITOR in roles:
                # Editors get full table access
                table_acl = (self.ALL, self.ALL)
            else:
                table_acl = self.table_acl(table=table,
                                           c=c,
                                           default=page_acl,
                                           require_org=require_org)
            # Combine page and table ACLs restrictively
            acl = self.most_restrictive((page_acl, table_acl))
        # Decide which ACL to use for this case
        if acl[0] == self.NONE and acl[1] == self.NONE:
            # No table access at all
            acl = self.NONE
        elif record is None:
            # No record specified, return most permissive ACL
            acl = (acl[0] & ~self.CREATE) | acl[1]
        else:
            # ACL based on ownership
            acl = is_owner and (acl[0] | acl[1]) or acl[1]
        _debug("acl=%04x" % acl)
        return acl
# -------------------------------------------------------------------------
    def page_acl(self, c=None, f=None, require_org=None):
        """
        Get the ACL for a page
        @param c: the controller (falls back to current request)
        @param f: the function (falls back to current request)
        @param require_org: restrict to ACLs of this organisation's
                            role (policy 6 only)
        @returns: tuple of (ACL for owned resources, ACL for all resources)
        """
        session = current.session
        policy = self.policy
        t = self.table
        sr = self.auth.get_system_roles()
        most_permissive = self.most_permissive
        roles = []
        if session.s3 is not None:
            roles = session.s3.roles or []
        if sr.ADMIN in roles:
            # Admin always has rights
            return (self.ALL, self.ALL)
        c = c or self.controller
        f = f or self.function
        page = "%s/%s" % (c, f)
        if page in self.unrestricted_pages:
            page_acl = (self.ALL, self.ALL)
        elif c not in self.modules or \
             c in self.modules and not self.modules[c].restricted or \
             not self.use_cacls:
            # Controller is not restricted => simple authorization
            if self.auth.s3_logged_in():
                page_acl = (self.ALL, self.ALL)
            else:
                page_acl = (self.READ, self.READ)
        else:
            # Lookup cached result
            page_acl = self.page_acls.get((page, require_org), None)
            if page_acl is None:
                page_acl = (self.NONE, self.NONE) # default
                # Match ACLs for this controller, either with this
                # specific function or with no function (=controller-wide)
                q = ((t.deleted != True) & \
                     (t.controller == c) & \
                     ((t.function == f) | (t.function == None)))
                if roles:
                    query = (t.group_id.belongs(roles)) & q
                else:
                    query = (t.group_id == None) & q
                # Additional restrictions in OrgAuth
                if policy == 6 and require_org:
                    field = t.organisation
                    query &= ((t.all_organisations == True) | \
                              (field == require_org) | (field == None))
                rows = current.db(query).select()
                if rows:
                    # ACLs found, check for function-specific
                    controller_acl = []
                    function_acl = []
                    for row in rows:
                        if not row.function:
                            controller_acl += [(row.oacl, row.uacl)]
                        else:
                            function_acl += [(row.oacl, row.uacl)]
                    # Function-specific ACL overrides Controller ACL
                    if function_acl and self.use_facls:
                        page_acl = most_permissive(function_acl)
                    elif controller_acl:
                        page_acl = most_permissive(controller_acl)
                # Remember this result
                self.page_acls.update({(page, require_org): page_acl})
        return page_acl
# -------------------------------------------------------------------------
    def table_acl(self, table=None, c=None, default=None,
                  require_org=None):
        """
        Get the ACL for a table
        @param table: the table (or tablename)
        @param c: the controller (falls back to current request)
        @param default: ACL to apply if no specific table ACL is found
        @param require_org: restrict to ACLs of this organisation's
                            role (policy 6 only)
        @returns: tuple of (ACL for owned resources, ACL for all resources)
        """
        if table is None or not self.use_tacls:
            # No table ACLs in use => fall back to the page ACL
            return self.page_acl(c=c)
        policy = self.policy
        t = self.table
        sr = self.auth.get_system_roles()
        roles = []
        if current.session.s3 is not None:
            roles = current.session.s3.roles or []
        if sr.ADMIN in roles:
            # Admin always has rights
            return (self.ALL, self.ALL)
        c = c or self.controller
        if default is None:
            if self.auth.s3_logged_in():
                default = (self.ALL, self.ALL)
            else:
                default = (self.READ, self.READ)
        # Already loaded?
        if hasattr(table, "_tablename"):
            tablename = table._tablename
        else:
            tablename = table
        # Lookup cached result
        table_acl = self.table_acls.get((tablename, require_org), None)
        if table_acl is None:
            # Match ACLs for this table, either with this specific
            # controller or with no controller (=table-wide)
            q = ((t.deleted != True) & \
                 (t.tablename == tablename) &
                 ((t.controller == c) | (t.controller == None)))
            if roles:
                query = (t.group_id.belongs(roles)) & q
            else:
                query = (t.group_id == None) & q
            # Additional restrictions in OrgAuth
            if policy == 6 and require_org:
                field = t.organisation
                query &= ((t.all_organisations == True) | \
                          (field == require_org) | (field == None))
            rows = current.db(query).select()
            table_acl = [(r.oacl, r.uacl) for r in rows]
            if table_acl:
                # ACL found, apply most permissive role
                table_acl = self.most_permissive(table_acl)
            else:
                # No ACL found for any of the roles, fall back to default
                table_acl = default
            # Remember this result
            self.table_acls.update({(tablename, require_org): table_acl})
        return table_acl
# -------------------------------------------------------------------------
    def get_owners(self, table, record):
        """
        Get the organisation/group/user owning a record
        @param table: the table
        @param record: the record ID (or the Row, if already loaded)
        @returns: tuple of (owner_role, owner_user, owner_org),
                  all None if ownership is not defined for the table
                  or the record does not exist
        """
        owner_org = None
        owner_role = None
        owner_user = None
        record_id = None
        # Check which ownership fields the table defines
        ownership_fields = ("owned_by_user",
                            "owned_by_group",
                            "owned_by_organisation")
        fields = [f for f in ownership_fields if f in table.fields]
        if not fields:
            # Ownership is not defined for this table
            return (None, None, None)
        if isinstance(record, Row):
            # Check if all necessary fields are present
            missing = [f for f in fields if f not in record]
            if missing:
                # Have to reload the record :(
                if table._id.name in record:
                    record_id = record[table._id.name]
                record = None
        else:
            # Record ID given
            record_id = record
            record = None
        if not record and record_id:
            # Get the record
            fs = [table[f] for f in fields] + [table.id]
            query = (table._id == record_id)
            record = current.db(query).select(limitby=(0, 1), *fs).first()
        if not record:
            # Record does not exist
            return (None, None, None)
        # Extract whichever ownership fields are present
        if "owned_by_group" in record:
            owner_role = record["owned_by_group"]
        if "owned_by_user" in record:
            owner_user = record["owned_by_user"]
        if "owned_by_organisation" in record:
            owner_org = record["owned_by_organisation"]
        return (owner_role, owner_user, owner_org)
# -------------------------------------------------------------------------
    def is_owner(self, table, record,
                 owner_role=None,
                 owner_user=None,
                 owner_org=None):
        """
        Establish the ownership of a record
        @param table: the table
        @param record: the record ID (or the Row if already loaded)
        @param owner_role: owner_role of the record (if already known)
        @param owner_user: owner_user of the record (if already known)
        @param owner_org: owner_org of the record (if already known)
        @returns: True if the current user (or session) owns the
                  record, else False
        @note: if passing a Row, it must contain all available ownership
               fields (id, owned_by_user, owned_by_group), otherwise the
               record will be re-loaded by this function
        """
        user_id = None
        roles = []
        sr = self.auth.get_system_roles()
        if self.auth.user is not None:
            user_id = self.auth.user.id
        if current.session.s3 is not None:
            roles = current.session.s3.roles or []
        if not user_id and not roles:
            return False
        elif sr.ADMIN in roles:
            # Admin owns all records
            return True
        elif record:
            # Load ownership information from the record
            owner_role, owner_user, owner_org = \
                self.get_owners(table, record)
            try:
                record_id = record.id
            except:
                record_id = record
        # Session ownership?
        if not user_id:
            if not owner_user and record_id and \
               self.auth.s3_session_owns(table, record_id):
                # Session owns record
                return True
            else:
                return False
        # Individual record ownership
        if owner_user and owner_user == user_id:
            return True
        # OrgAuth?
        if self.policy == 6 and owner_org:
            # Must have the organisation's staff role
            if owner_org not in roles:
                return False
        # Owner?
        if not owner_role and not owner_user:
            # All authenticated users own this record
            return True
        elif owner_role and owner_role in roles:
            # user has owner role
            return True
        else:
            return False
# -------------------------------------------------------------------------
    def hidden_modules(self):
        """
        List of modules to hide from the main menu
        @returns: list of controller prefixes the current user has
                  no access to (empty if ACLs are not in use, or for
                  Admin/Editor)
        """
        sr = self.auth.get_system_roles()
        hidden_modules = []
        if self.use_cacls:
            restricted_modules = [m for m in self.modules
                                    if self.modules[m].restricted]
            roles = []
            if current.session.s3 is not None:
                roles = current.session.s3.roles or []
            if sr.ADMIN in roles or sr.EDITOR in roles:
                # Admin/Editor see all modules
                return []
            if not roles:
                # No roles at all => hide every restricted module
                hidden_modules = restricted_modules
            else:
                t = self.table
                # Controller-wide ACLs only (no tablename)
                query = (t.deleted != True) & \
                        (t.controller.belongs(restricted_modules)) & \
                        (t.tablename == None)
                if roles:
                    query = query & (t.group_id.belongs(roles))
                else:
                    query = query & (t.group_id == None)
                rows = current.db(query).select()
                # Combine the ACLs per controller by bitwise OR
                acls = dict()
                for acl in rows:
                    if acl.controller not in acls:
                        acls[acl.controller] = self.NONE
                    acls[acl.controller] |= acl.oacl | acl.uacl
                # Hide modules with no ACL or an all-zero ACL
                hidden_modules = [m for m in restricted_modules
                                    if m not in acls or not acls[m]]
        return hidden_modules
# -------------------------------------------------------------------------
def accessible_url(self,
                   c=None,
                   f=None,
                   p=None,
                   t=None,
                   a=None,
                   args=None,
                   vars=None,
                   anchor="",
                   extension=None,
                   env=None):
    """
    Return a URL only if accessible by the user, otherwise False

    @param c: the controller
    @param f: the function
    @param p: the permission (defaults to READ)
    @param t: the tablename (defaults to <c>_<f>)
    @param a: the application name
    @param args: the URL arguments
    @param vars: the URL variables
    @param anchor: the anchor (#) of the URL
    @param extension: the request format extension
    @param env: the environment
    """

    # Use None-defaults instead of mutable default arguments:
    # a list/dict default is created once and shared between calls
    if args is None:
        args = []
    if vars is None:
        vars = {}

    # Required permission (defaults to READ)
    required = self.METHODS
    if p in required:
        permission = required[p]
    else:
        permission = self.READ

    # Default to the current controller/function
    if not c:
        c = self.controller
    if not f:
        f = self.function

    if t is None:
        tablename = "%s_%s" % (c, f)
    else:
        tablename = t

    # Hide disabled modules
    if self.modules and c not in self.modules:
        return False

    permitted = True
    if not self.auth.override:
        if self.use_cacls:
            # Check the applicable ACL against the required permission
            acl = self(c=c, f=f, table=tablename)
            if acl & permission != permission:
                permitted = False
        else:
            # Without ACLs: anything but READ requires a login
            if permission != self.READ:
                permitted = self.auth.s3_logged_in()

    if permitted:
        return URL(a=a,
                   c=c,
                   f=f,
                   args=args,
                   vars=vars,
                   anchor=anchor,
                   extension=extension,
                   env=env)
    else:
        return False
# -------------------------------------------------------------------------
def page_restricted(self, c=None, f=None):
    """
    Check whether a page is restricted, i.e. whether ACLs
    are to be applied to it

    @param c: controller
    @param f: function
    """

    # Explicitly unrestricted pages are never restricted
    if "%s/%s" % (c, f) in self.unrestricted_pages:
        return False

    # Unknown modules are not restricted
    modules = self.modules
    if c not in modules:
        return False

    # Otherwise restriction follows the module setting
    if not modules[c].restricted:
        return False
    return True
# -------------------------------------------------------------------------
def applicable_acls(self, roles, racl, c=None, f=None, t=None):
    """
    Get the available ACLs for the particular situation

    @param roles: the roles of the current user
    @param racl: the required ACL (bitmap of required permissions)
    @param c: controller
    @param f: function
    @param t: tablename

    @returns: None for no ACLs to apply (access granted), [] for
              no ACLs matching the required permissions (access
              denied), or a list of ACLs to apply.
    """

    db = current.db
    table = self.table

    if not self.use_cacls:
        # We do not use ACLs at all
        return None

    # Default to the current controller/function
    c = c or self.controller
    f = f or self.function
    if self.page_restricted(c=c, f=f):
        page_restricted = True
    else:
        page_restricted = False

    # Get page ACLs
    page_acls = None
    if page_restricted:
        # Base query: either a controller-wide ACL (function == None)
        # or, if function ACLs are in use, a function-specific one
        query = (table.deleted != True) & \
                (table.function == None)
        if f and self.use_facls:
            query = (query | (table.function == f))
        query &= (table.controller == c)
        # Do not use delegated ACLs except for policy 6
        if self.policy != 6:
            query &= (table.organisation == None)
        # Restrict to available roles
        if roles:
            query &= (table.group_id.belongs(roles))
        else:
            query &= (table.group_id == None)
        page_acls = db(query).select(table.ALL)
        if page_acls:
            if f and self.use_facls:
                # Function-specific ACLs override controller-wide ones
                facl = [acl for acl in page_acls if acl.function != None]
                if facl:
                    page_acls = facl
            # Keep only ACLs that grant the required permissions
            # (either for all records or for owned records)
            page_acls = [acl for acl in page_acls
                             if (acl.uacl & racl == racl or
                                 acl.oacl & racl == racl)]
        else:
            # Page is restricted, but no permitting ACL
            # available for this set of roles => no access
            return []

    # Get table ACLs
    table_acls = []
    if t and self.use_tacls:
        # Base query
        query = ((table.deleted != True) & \
                 (table.controller == None) & \
                 (table.function == None) &
                 (table.tablename == t))
        # Is the table restricted at all?
        # NOTE: this probe deliberately runs before the organisation
        # and role filters are appended to the query
        restricted = db(query).select(limitby=(0, 1)).first() is not None
        # Do not use delegated ACLs except for policy 6
        if self.policy != 6:
            query &= (table.organisation == None)
        # Restrict to available roles
        if roles:
            query = (table.group_id.belongs(roles)) & query
        else:
            query = (table.group_id == None) & query
        table_acls = db(query).select(table.ALL)
        if restricted and table_acls:
            # if the table is restricted and there are ACLs
            # available for this set of roles, then deny access
            # if none of the ACLs gives the required permissions
            _debug("acls: %s" % table_acls)
            default = []
        else:
            # otherwise, if the table is unrestricted or there are
            # no restricting ACLs for this set of roles, then grant
            # access as per page_acls
            default = page_acls
        # Find matches
        table_acls = [acl for acl in table_acls
                          if (acl.uacl & racl == racl or
                              acl.oacl & racl == racl)]
        if table_acls:
            # Found matching table ACLs, grant access
            return table_acls
        else:
            # No matching table ACLs found
            return default

    # default: no table ACLs requested => fall back to the page ACLs
    return page_acls
# -------------------------------------------------------------------------
def accessible_query(self, table, *methods):
    """
    Query for records which the user is permitted to access
    with methods

    Example::
        query = auth.permission.accessible_query(table,
                                                 "read", "update")
        - requests a query for records that can be both read and
          updated.

    @param table: the DB table
    @param methods: list of methods for which permission is
                    required (AND), any combination of "create",
                    "read", "update", "delete"
    """

    _debug("accessible_query(%s, %s)" % (table, methods))

    session = current.session

    policy = self.policy

    required = self.METHODS
    sr = self.auth.get_system_roles()

    # Names of the ownership fields on auditable tables
    OWNED_BY_ORG = "owned_by_organisation"
    OWNED_BY_USER = "owned_by_user"
    OWNED_BY_GROUP = "owned_by_group"
    ALL_ORGS = "all_organisations"

    # Default queries (match-all resp. match-none)
    query = (table._id != None)
    no_access = (table._id == None)

    # Required ACL: OR the permission bits of all requested methods
    # (NOTE: "reduce" is the Python 2 builtin here)
    racl = reduce(lambda a, b: a | b,
                  [required[m]
                   for m in methods if m in required],
                  self.NONE)
    if not racl:
        _debug("No permission specified, query=%s" % query)
        return query

    # User & Roles
    user_id = None
    if self.auth.user is not None:
        user_id = self.auth.user.id
    roles = []
    if session.s3 is not None:
        roles = session.s3.roles or []
    if sr.ADMIN in roles or sr.EDITOR in roles:
        # Admins/Editors can access all records
        _debug("Admin/Editor in Roles, query=%s" % query)
        return query

    # Org roles the user has (policy 6 = organisation-based auth)
    org_roles = []
    all_orgs = False
    if policy == 6:
        org_roles = list(roles)

    # Applicable ACLs: None = unconditional grant, [] = deny,
    # non-empty list = grant subject to the ACLs below
    acls = self.applicable_acls(roles, racl, t=table)
    permitted = False
    ownership_required = True
    if acls is None:
        permitted = True
        ownership_required = False
    elif acls:
        permitted = True
        for acl in acls:
            _debug("ACL: oacl=%04x uacl=%04x" % (acl.oacl, acl.uacl))
            if acl.uacl & racl == racl:
                # uACL grants access to all records, not just owned ones
                ownership_required = False
                _debug("uACL found - no ownership required")
            if policy == 6:
                # Collect the organisations this ACL is delegated to
                org_role = acl.organisation
                if acl[ALL_ORGS]:
                    all_orgs = True
                elif org_role and org_role not in org_roles:
                    org_roles.append(org_role)

    if not permitted:
        _debug("No access")
        return no_access

    _debug("ownership_required=%s" % ownership_required)

    # Query fragments (only defined if the table has the fields)
    if OWNED_BY_ORG in table:
        has_org_role = ((table[OWNED_BY_ORG] == None) | \
                        (table[OWNED_BY_ORG].belongs(org_roles)))
    if OWNED_BY_USER in table:
        user_owns_record = (table[OWNED_BY_USER] == user_id)

    # OrgAuth: restrict to records of the user's organisations
    # (or owned by the user), unless an ACL applies to all orgs
    q = None
    if policy == 6 and OWNED_BY_ORG in table and not all_orgs:
        q = has_org_role
        if user_id and OWNED_BY_USER in table:
            q |= user_owns_record
    if q is not None:
        query = q

    if ownership_required:
        if not user_id:
            # Anonymous user: only session-owned records are accessible
            query = (table._id == None)
            if OWNED_BY_USER in table:
                try:
                    records = session.owned_records.get(table._tablename,
                                                        None)
                except:
                    # no owned_records in session
                    pass
                else:
                    if records:
                        query = (table._id.belongs(records))
        else:
            # Authenticated user: owned by one of the user's
            # roles (group) or by the user individually
            qowner = qrole = quser = None
            if OWNED_BY_GROUP in table:
                qrole = (table.owned_by_group.belongs(roles))
            if OWNED_BY_USER in table and user_id:
                quser = (table.owned_by_user == user_id)

            if qrole is not None:
                qowner = qrole

            if quser is not None:
                if qowner is not None:
                    qowner = (qowner | quser)
                else:
                    qowner = quser
            if qowner is not None:
                if query is not None:
                    query = query & qowner
                else:
                    query = qowner

    # Fallback
    if query is None:
        query = (table._id > 0)

    _debug("Access granted, query=%s" % query)
    return query
# -------------------------------------------------------------------------
def ownership_required(self, table, *methods):
    """
    Check if record ownership is required for a method

    @param table: the table
    @param methods: methods to check (OR)

    @status: deprecated, using applicable_acls instead
    """

    sr = self.auth.get_system_roles()

    roles = []
    if current.session.s3 is not None:
        # No ownership required in policies without ACLs
        if not self.use_cacls:
            return False
        roles = current.session.s3.roles or []

    if sr.ADMIN in roles or sr.EDITOR in roles:
        # Admins and Editors do not need to own a record
        return False

    # Required ACL: OR the permission bits of all requested methods
    # (NOTE: "reduce" is the Python 2 builtin here)
    required = self.METHODS
    racl = reduce(lambda a, b: a | b,
                  [required[m] for m in methods if m in required],
                  self.NONE)
    if not racl:
        return False

    # Available ACLs: (oacl, uacl) tuple
    pacl = self.page_acl()
    if not self.use_tacls:
        acl = pacl
    else:
        tacl = self.table_acl(table)
        acl = (tacl[0] & pacl[0], tacl[1] & pacl[1])

    # Ownership is required when the method is permitted at all,
    # the table has ownership fields, and the uACL alone does not
    # grant the required permissions (i.e. only the oACL does)
    permitted = (acl[0] | acl[1]) & racl == racl
    ownership_required = False
    if permitted and \
       ("owned_by_group" in table or "owned_by_user" in table):
        ownership_required = acl[1] & racl != racl
    return ownership_required
# -------------------------------------------------------------------------
def has_permission(self, table, record=None, method=None):
    """
    Check permission to access a record

    @param table: the table
    @param record: the record or record ID (None for any record)
    @param method: the method (or tuple/list of methods),
                   any of "create", "read", "update", "delete"

    @note: when submitting a record, the record ID and the ownership
           fields (="owned_by_user", "owned_by_group") must be contained
           if available, otherwise the record will be re-loaded
    """

    _debug("has_permission(%s, %s, method=%s)" %
           (table, record, method))

    if isinstance(method, (list, tuple)):
        methods = method
    else:
        methods = [method]

    # Combine the permission bits of all requested methods
    required = self.METHODS
    racl = self.NONE
    for m in methods:
        if m in required:
            racl |= required[m]

    # Compare against the ACL applicable to this table/record
    aacl = self(table=table, record=record)

    permitted = racl & aacl == racl
    _debug("permitted=%s" % permitted)
    return permitted
# -------------------------------------------------------------------------
def permitted_facilities(self,
                         table=None,
                         error_msg=None,
                         redirect_on_error=True,
                         facility_type=None):
    """
    Return the site_ids of all facilities the user has "update"
    permission for. If there are no facilities that the user has
    permission for, prevents create & update of records in table &
    gives a warning if the user tries to.

    @param table: the table or table name
    @param error_msg: error message
    @param redirect_on_error: whether to redirect on error
    @param facility_type: restrict to this particular type of
                          facilities (a tablename)
    """

    db = current.db
    s3db = current.s3db
    T = current.T
    ERROR = T("You do not have permission for any facility to perform this action.")
    HINT = T("Create a new facility or ensure that you have permissions for an existing facility.")

    if not error_msg:
        error_msg = ERROR

    site_ids = []
    if facility_type is None:
        site_types = self.auth.org_site_types
    else:
        if facility_type not in self.auth.org_site_types:
            # Unknown facility type => nothing to check
            return
        site_types = [s3db[facility_type]]
    for site_type in site_types:
        try:
            ftable = s3db[site_type]
            if "site_id" not in ftable.fields:
                continue
            query = self.auth.s3_accessible_query("update", ftable)
            if "deleted" in ftable:
                query &= (ftable.deleted != True)
            rows = db(query).select(ftable.site_id)
            site_ids += [row.site_id for row in rows]
        except Exception:
            # Module disabled => table not defined, skip
            # (narrowed from a bare except, which would also
            # swallow KeyboardInterrupt/SystemExit)
            pass

    if site_ids:
        return site_ids

    args = current.request.args
    if "update" in args or "create" in args:
        if redirect_on_error:
            # Trying to create or update
            # If they do no have permission to any facilities
            current.session.error = "%s %s" % (error_msg, HINT)
            redirect(URL(c="default", f="index"))
    elif table is not None:
        # Block inserts into the table instead of redirecting
        if hasattr(table, "_tablename"):
            tablename = table._tablename
        else:
            tablename = table
        current.manager.configure(tablename, insertable = False)
    return []
# -------------------------------------------------------------------------
def permitted_organisations(self,
                            table=None,
                            error_msg=None,
                            redirect_on_error=True):
    """
    Return the IDs of all organisations the user has "update"
    permission for. If there are none, prevents create & update of
    a record in table & gives a warning if the user tries to.

    @param table: the table or table name
    @param error_msg: error message
    @param redirect_on_error: whether to redirect on error
    """

    db = current.db
    s3db = current.s3db
    manager = current.manager
    T = current.T
    ERROR = T("You do not have permission for any organization to perform this action.")
    HINT = T("Create a new organization or ensure that you have permissions for an existing organization.")

    if not error_msg:
        error_msg = ERROR

    org_table = s3db.org_organisation
    query = self.auth.s3_accessible_query("update", org_table)
    query &= (org_table.deleted == False)
    rows = db(query).select(org_table.id)
    if rows:
        return [org.id for org in rows]

    request = current.request
    if "update" in request.args or "create" in request.args:
        if redirect_on_error:
            # Fix: set the error on the current session (not on the
            # manager), consistent with permitted_facilities()
            current.session.error = "%s %s" % (error_msg, HINT)
            redirect(URL(c="default", f="index"))
    elif table is not None:
        # Block inserts into the table instead of redirecting
        if hasattr(table, "_tablename"):
            tablename = table._tablename
        else:
            tablename = table
        manager.configure(tablename, insertable = False)
    return []
# -------------------------------------------------------------------------
def fail(self):
    """
    Action upon insufficient permissions
    """

    logged_in = self.auth.s3_logged_in()

    if self.format == "html":
        # HTML interactive request => flash message + redirect
        # (redirect raises, so control never falls through)
        if logged_in:
            current.session.error = self.INSUFFICIENT_PRIVILEGES
            redirect(self.homepage)
        current.session.error = self.AUTHENTICATION_REQUIRED
        redirect(self.loginpage)

    # non-HTML request => raise proper HTTP error
    if logged_in:
        raise HTTP(403, body=self.INSUFFICIENT_PRIVILEGES)
    raise HTTP(401, body=self.AUTHENTICATION_REQUIRED)
# =============================================================================
class S3Audit(object):
    """
    S3 Audit Trail Writer Class

    Records read/write operations on resources in a dedicated
    audit table, controlled by the session flags audit_read
    and audit_write.

    @author: Dominic König <dominic@aidiq.com>
    """

    def __init__(self,
                 tablename="s3_audit",
                 migrate=True,
                 fake_migrate=False):
        """
        Constructor

        @param tablename: the name of the audit table
        @param migrate: migration setting

        @note: this defines the audit table
        """

        db = current.db
        # Re-use the audit table if it is already defined on the DB
        self.table = db.get(tablename, None)
        if not self.table:
            self.table = db.define_table(tablename,
                                         Field("timestmp", "datetime"),
                                         Field("person", "integer"),
                                         Field("operation"),
                                         Field("tablename"),
                                         Field("record", "integer"),
                                         Field("representation"),
                                         # old_value/new_value hold
                                         # "field:value" string lists
                                         Field("old_value", "text"),
                                         Field("new_value", "text"),
                                         migrate=migrate,
                                         fake_migrate=fake_migrate)
        session = current.session
        self.auth = session.auth
        # Cache the current user ID for the audit entries
        if session.auth and session.auth.user:
            self.user = session.auth.user.id
        else:
            self.user = None
        self.diff = None

    # -------------------------------------------------------------------------
    def __call__(self, operation, prefix, name,
                 form=None,
                 record=None,
                 representation="unknown"):
        """
        Audit

        @param operation: Operation to log, one of
                          "create", "update", "read", "list" or "delete"
        @param prefix: the module prefix of the resource
        @param name: the name of the resource (without prefix)
        @param form: the form
        @param record: the record ID
        @param representation: the representation format

        @returns: True (always, so it can be chained in conditions)
        """

        settings = current.session.s3

        #print >>sys.stderr, "Audit %s: %s_%s record=%s representation=%s" % \
                            #(operation, prefix, name, record, representation)

        now = datetime.datetime.utcnow()
        db = current.db
        table = self.table
        tablename = "%s_%s" % (prefix, name)

        # Resolve the record ID from the record or the form
        if record:
            if isinstance(record, Row):
                record = record.get("id", None)
                if not record:
                    return True
            try:
                record = int(record)
            except ValueError:
                record = None
        elif form:
            try:
                record = form.vars["id"]
            except:
                try:
                    record = form["id"]
                except:
                    record = None
            if record:
                try:
                    record = int(record)
                except ValueError:
                    record = None
        else:
            record = None

        if operation in ("list", "read"):
            # Read access is only audited if audit_read is enabled
            if settings.audit_read:
                table.insert(timestmp = now,
                             person = self.user,
                             operation = operation,
                             tablename = tablename,
                             record = record,
                             representation = representation)

        elif operation in ("create", "update"):
            # Write access: store the submitted values (if any)
            if settings.audit_write:
                if form:
                    record = form.vars.id
                    new_value = ["%s:%s" % (var, str(form.vars[var]))
                                 for var in form.vars]
                else:
                    new_value = []
                table.insert(timestmp = now,
                             person = self.user,
                             operation = operation,
                             tablename = tablename,
                             record = record,
                             representation = representation,
                             new_value = new_value)
                self.diff = None

        elif operation == "delete":
            # Deletion: store the old values of the deleted record
            if settings.audit_write:
                query = db[tablename].id == record
                row = db(query).select(limitby=(0, 1)).first()
                old_value = []
                if row:
                    old_value = ["%s:%s" % (field, row[field])
                                 for field in row]
                table.insert(timestmp = now,
                             person = self.user,
                             operation = operation,
                             tablename = tablename,
                             record = record,
                             representation = representation,
                             old_value = old_value)
                self.diff = None

        return True
# =============================================================================
class S3RoleManager(S3Method):
    """
    REST Method to manage ACLs (Role Manager UI for administrators)

    @todo: does not handle org-wise role assignment or
           delegation of permissions yet.
    """

    # Controllers to hide from the permissions matrix
    HIDE_CONTROLLER = ("admin", "default")

    # Roles to hide from the permissions matrix
    # @todo: deprecate
    HIDE_ROLES = (1, 4)

    # Undeletable roles (system role IDs)
    # @todo: deprecate
    PROTECTED_ROLES = (1, 2, 3, 4, 5)

    # Available controllers, shared across instances
    # (populated externally — TODO confirm against caller)
    controllers = Storage()
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
    """
    Apply role manager: dispatch the request to the handler
    for the requested method.
    """

    method = self.method

    if method == "list":
        output = self._list(r, **attr)
    elif method in ("read", "create", "update"):
        output = self._edit(r, **attr)
    elif method == "delete":
        output = self._delete(r, **attr)
    elif method == "users":
        output = self._users(r, **attr)
    elif method == "roles" and r.name == "user":
        output = self._roles(r, **attr)
    else:
        r.error(405, current.manager.ERROR.BAD_METHOD)

    # Remember the return point for the Cancel link
    if r.http == "GET" and method not in ("create", "update", "delete"):
        current.session.s3.cancel = r.url()

    return output
# -------------------------------------------------------------------------
def _list(self, r, **attr):
    """
    List roles/permissions

    Renders the role list, optionally with the full permissions
    matrix (?matrix=1).

    @param r: the S3Request
    @param attr: controller attributes
    """

    output = dict()

    request = self.request
    response = current.response
    resource = self.resource
    manager = current.manager
    auth = manager.auth

    db = current.db
    table = self.table

    T = current.T

    if r.id:
        return self._edit(r, **attr)

    # Show permission matrix?
    show_matrix = request.get_vars.get("matrix", False) and True

    if r.interactive:

        # Title and subtitle
        output.update(title = T("List of Roles"))

        # System roles
        query = ((table.deleted != True) & \
                 (table.system == True))
        rows = db(query).select(table.id)
        system_roles = [row.id for row in rows]

        # Protected roles
        query = ((table.deleted != True) & \
                 (table.protected == True))
        rows = db(query).select(table.id)
        protected_roles = [row.id for row in rows]

        # Filter out hidden roles
        resource.add_filter((~(table.id.belongs(self.HIDE_ROLES))) &
                            (table.hidden != True))
        resource.load()

        # Get active controllers
        controllers = [c for c in self.controllers.keys()
                         if c not in self.HIDE_CONTROLLER]

        # ACLs of the listed roles
        acl_table = auth.permission.table
        query = resource.get_query()
        query = query & \
                (acl_table.group_id == self.table.id) & \
                (acl_table.deleted != True)
        records = db(query).select(acl_table.ALL)

        # Index the ACLs as acls[function][controller][role_id]
        any = "ANY"
        acls = Storage({any: Storage()})
        for acl in records:
            c = acl.controller
            f = acl.function
            if not f:
                f = any
            role_id = acl.group_id
            if f not in acls:
                acls[f] = Storage()
            if c not in acls[f]:
                acls[f][c] = Storage()
            acls[f][c][str(role_id)] = Storage(oacl = acl.oacl,
                                               uacl = acl.uacl)
        for c in controllers:
            if c not in acls[any]:
                acls[any][c] = Storage()
            if any not in acls[any][c]:
                acls[any][c][any] = Storage(oacl = auth.permission.NONE,
                                            uacl = auth.permission.NONE)

        # Table header
        columns = []
        headers = [TH("ID"), TH(T("Role"))]
        if show_matrix:
            for c in controllers:
                if c in acls[any]:
                    headers.append(TH(self.controllers[c].name_nice))
                    columns.append((c, any))
                for f in acls:
                    if f != any and c in acls[f]:
                        headers.append(TH(self.controllers[c].name_nice,
                                          BR(), f))
                        columns.append((c, f))
        else:
            headers += [TH(T("Description"))]
        thead = THEAD(TR(headers))

        # Table body
        trows = []
        i = 1
        for role in resource:

            role_id = role.id
            role_name = role.role
            role_desc = role.description

            edit_btn = A(T("Edit"),
                         _href=URL(c="admin", f="role",
                                   args=[role_id], vars=request.get_vars),
                         _class="action-btn")
            users_btn = A(T("Users"),
                          _href=URL(c="admin", f="role",
                                    args=[role_id, "users"]),
                          _class="action-btn")
            if role.protected:
                # Protected roles cannot be deleted
                tdata = [TD(edit_btn,
                            XML(" "),
                            users_btn),
                         TD(role_name)]
            else:
                delete_btn = A(T("Delete"),
                               _href=URL(c="admin", f="role",
                                         args=[role_id, "delete"],
                                         vars=request.get_vars),
                               _class="delete-btn")
                tdata = [TD(edit_btn,
                            XML(" "),
                            users_btn,
                            XML(" "),
                            delete_btn),
                         TD(role_name)]

            if show_matrix:
                # Display the permission matrix
                for c, f in columns:
                    if f in acls and c in acls[f] and \
                       str(role_id) in acls[f][c]:
                        oacl = acls[f][c][str(role_id)].oacl
                        uacl = acls[f][c][str(role_id)].uacl
                    else:
                        oacl = acls[any][c][any].oacl
                        # Fix: the default uACL must come from .uacl
                        # (was copy-pasted as .oacl)
                        uacl = acls[any][c][any].uacl

                    oaclstr = ""
                    uaclstr = ""
                    options = auth.permission.PERMISSION_OPTS
                    NONE = auth.permission.NONE
                    for o in options:
                        if o == NONE and oacl == NONE:
                            oaclstr = "%s%s" % (oaclstr, options[o][0])
                        elif oacl and oacl & o:
                            oaclstr = "%s%s" % (oaclstr, options[o][0])
                        else:
                            oaclstr = "%s-" % oaclstr
                        if o == NONE and uacl == NONE:
                            uaclstr = "%s%s" % (uaclstr, options[o][0])
                        elif uacl and uacl & o:
                            uaclstr = "%s%s" % (uaclstr, options[o][0])
                        else:
                            uaclstr = "%s-" % uaclstr

                    values = "%s (%s)" % (uaclstr, oaclstr)
                    tdata += [TD(values, _nowrap="nowrap")]
            else:
                # Display role descriptions
                tdata += [TD(role_desc)]

            _class = i % 2 and "even" or "odd"
            trows.append(TR(tdata, _class=_class))
        tbody = TBODY(trows)

        # Aggregate list
        items = TABLE(thead, tbody, _id="list", _class="dataTable display")
        output.update(items=items, sortby=[[1, "asc"]])

        # Add-button
        add_btn = A(T("Add Role"), _href=URL(c="admin", f="role",
                                             args=["create"]),
                    _class="action-btn")
        output.update(add_btn=add_btn)

        response.view = "admin/role_list.html"
        response.s3.actions = []
        response.s3.no_sspag = True

    elif r.representation == "xls":
        # Not implemented yet
        r.error(501, manager.ERROR.BAD_FORMAT)

    else:
        r.error(501, manager.ERROR.BAD_FORMAT)

    return output
# -------------------------------------------------------------------------
def _edit(self, r, **attr):
    """
    Create/update a role, including its controller, function
    and table ACLs.

    @param r: the S3Request
    @param attr: controller attributes
    """

    output = dict()

    request = self.request
    session = current.session
    manager = current.manager
    db = current.db
    T = current.T

    crud_settings = manager.s3.crud

    CACL = T("Application Permissions")
    FACL = T("Function Permissions")
    TACL = T("Table Permissions")

    CANCEL = T("Cancel")

    auth = manager.auth
    model = manager.model
    acl_table = auth.permission.table

    if r.interactive:

        # Get the current record (if any)
        if r.record:
            output.update(title=T("Edit Role"))
            role_id = r.record.id
            role_name = r.record.role
            role_desc = r.record.description
        else:
            output.update(title=T("New Role"))
            role_id = None
            role_name = None
            role_desc = None

        # Form helpers ----------------------------------------------------
        mandatory = lambda l: DIV(l, XML(" "),
                                  SPAN("*", _class="req"))
        acl_table.oacl.requires = IS_ACL(auth.permission.PERMISSION_OPTS)
        acl_table.uacl.requires = IS_ACL(auth.permission.PERMISSION_OPTS)
        acl_widget = lambda f, n, v: \
                        S3ACLWidget.widget(acl_table[f], v, _id=n, _name=n,
                                           _class="acl-widget")
        formstyle = crud_settings.formstyle

        using_default = SPAN(T("using default"), _class="using-default")
        delete_acl = lambda _id: _id is not None and \
                                 A(T("Delete"),
                                   _href = URL(c="admin", f="acl",
                                               args=[_id, "delete"],
                                               vars=dict(_next=r.url())),
                                   _class = "delete-btn") or using_default
        new_acl = SPAN(T("new ACL"), _class="new-acl")

        # Role form -------------------------------------------------------
        form_rows = formstyle("role_name",
                              mandatory("%s:" % T("Role Name")),
                              INPUT(value=role_name,
                                    _name="role_name",
                                    _type="text",
                                    requires=IS_NOT_IN_DB(db,
                                        "auth_group.role",
                                        allowed_override=[role_name])),
                              "") + \
                    formstyle("role_desc",
                              "%s:" % T("Description"),
                              TEXTAREA(value=role_desc,
                                       _name="role_desc",
                                       _rows="4"),
                              "")
        key_row = P(T("* Required Fields"), _class="red")
        role_form = DIV(TABLE(form_rows), key_row, _id="role-form")

        # Prepare ACL forms -----------------------------------------------
        any = "ANY"
        controllers = [c for c in self.controllers.keys()
                         if c not in self.HIDE_CONTROLLER]
        ptables = []
        query = (acl_table.deleted != True) & \
                (acl_table.group_id == role_id)
        records = db(query).select()

        acl_forms = []

        # Relevant ACLs, indexed as acls[controller][function]
        acls = Storage()
        for acl in records:
            if acl.controller in controllers:
                if acl.controller not in acls:
                    acls[acl.controller] = Storage()
                if not acl.function:
                    f = any
                else:
                    if auth.permission.use_facls:
                        f = acl.function
                    else:
                        continue
                acls[acl.controller][f] = acl

        # Controller ACL table --------------------------------------------

        # Table header
        thead = THEAD(TR(TH(T("Application")),
                         TH(T("All Records")),
                         TH(T("Owned Records")),
                         TH()))

        # Rows for existing ACLs
        form_rows = []
        i = 0
        for c in controllers:
            default = Storage(id = None,
                              controller = c,
                              function = any,
                              tablename = None,
                              uacl = auth.permission.NONE,
                              oacl = auth.permission.NONE)
            if c in acls:
                acl_list = acls[c]
                if any not in acl_list:
                    acl_list[any] = default
            else:
                acl_list = Storage(ANY=default)
            acl = acl_list[any]
            _class = i % 2 and "even" or "odd"
            i += 1
            uacl = auth.permission.NONE
            oacl = auth.permission.NONE
            if acl.oacl is not None:
                oacl = acl.oacl
            if acl.uacl is not None:
                uacl = acl.uacl
            _id = acl.id
            delete_btn = delete_acl(_id)
            # Widget names encode id_controller_function_tablename
            n = "%s_%s_ANY_ANY" % (_id, c)
            uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
            oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
            cn = self.controllers[c].name_nice
            form_rows.append(TR(TD(cn),
                                TD(uacl),
                                TD(oacl),
                                TD(delete_btn),
                                _class=_class))

        # Tabs
        tabs = [SPAN(A(CACL), _class="tab_here")]
        if auth.permission.use_facls:
            _class = auth.permission.use_tacls and \
                     "tab_other" or "tab_last"
            tabs.append(SPAN(A(FACL, _class="facl-tab"), _class=_class))
        if auth.permission.use_tacls:
            tabs.append(SPAN(A(TACL, _class="tacl-tab"),
                             _class="tab_last"))

        acl_forms.append(DIV(DIV(tabs, _class="tabs"),
                             TABLE(thead, TBODY(form_rows)),
                             _id="controller-acls"))

        # Function ACL table ----------------------------------------------
        if auth.permission.use_facls:

            # Table header
            thead = THEAD(TR(TH(T("Application")),
                             TH(T("Function")),
                             TH(T("All Records")),
                             TH(T("Owned Records")),
                             TH()))

            # Rows for existing ACLs
            form_rows = []
            i = 0
            for c in controllers:
                if c in acls:
                    acl_list = acls[c]
                else:
                    continue
                keys = acl_list.keys()
                keys.sort()
                for f in keys:
                    if f == any:
                        continue
                    acl = acl_list[f]
                    _class = i % 2 and "even" or "odd"
                    i += 1
                    uacl = auth.permission.NONE
                    oacl = auth.permission.NONE
                    if acl.oacl is not None:
                        oacl = acl.oacl
                    if acl.uacl is not None:
                        uacl = acl.uacl
                    _id = acl.id
                    delete_btn = delete_acl(_id)
                    n = "%s_%s_%s_ANY" % (_id, c, f)
                    uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
                    oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
                    cn = self.controllers[c].name_nice
                    form_rows.append(TR(TD(cn),
                                        TD(f),
                                        TD(uacl),
                                        TD(oacl),
                                        TD(delete_btn),
                                        _class=_class))

            # Row to enter a new controller ACL
            _class = i % 2 and "even" or "odd"
            c_opts = [OPTION("", _value=None, _selected="selected")] + \
                     [OPTION(self.controllers[c].name_nice,
                             _value=c) for c in controllers]
            c_select = SELECT(_name="new_controller", *c_opts)
            form_rows.append(TR(
                TD(c_select),
                TD(INPUT(_type="text", _name="new_function")),
                TD(acl_widget("uacl", "new_c_uacl", auth.permission.NONE)),
                TD(acl_widget("oacl", "new_c_oacl", auth.permission.NONE)),
                TD(new_acl), _class=_class))

            # Tabs to change to the other view
            tabs = [SPAN(A(CACL, _class="cacl-tab"),
                         _class="tab_other"),
                    SPAN(A(FACL), _class="tab_here")]
            if auth.permission.use_tacls:
                tabs.append(SPAN(A(TACL, _class="tacl-tab"),
                                 _class="tab_last"))

            acl_forms.append(DIV(DIV(tabs, _class="tabs"),
                                 TABLE(thead, TBODY(form_rows)),
                                 _id="function-acls"))

        # Table ACL table -------------------------------------------------
        if auth.permission.use_tacls:
            query = (acl_table.deleted != True) & \
                    (acl_table.tablename != None)
            tacls = db(query).select(acl_table.tablename, distinct=True)
            if tacls:
                ptables = [acl.tablename for acl in tacls]
            # Relevant ACLs
            acls = dict([(acl.tablename, acl) for acl in records
                                              if acl.tablename in ptables])

            # Table header
            thead = THEAD(TR(TH(T("Tablename")),
                             TH(T("All Records")),
                             TH(T("Owned Records")),
                             TH()))

            # Rows for existing table ACLs
            form_rows = []
            i = 0
            for t in ptables:
                _class = i % 2 and "even" or "odd"
                i += 1
                uacl = auth.permission.NONE
                oacl = auth.permission.NONE
                _id = None
                if t in acls:
                    acl = acls[t]
                    if acl.uacl is not None:
                        uacl = acl.uacl
                    if acl.oacl is not None:
                        oacl = acl.oacl
                    _id = acl.id
                delete_btn = delete_acl(_id)
                n = "%s_ANY_ANY_%s" % (_id, t)
                uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
                oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
                form_rows.append(TR(TD(t),
                                    TD(uacl),
                                    TD(oacl),
                                    TD(delete_btn),
                                    _class=_class))

            # Row to enter a new table ACL
            _class = i % 2 and "even" or "odd"
            # @todo: find a better way to provide a selection of tables
            #all_tables = [t._tablename for t in current.db]
            form_rows.append(TR(
                TD(INPUT(_type="text", _name="new_table")),
                # @todo: doesn't work with conditional models
                #requires=IS_EMPTY_OR(IS_IN_SET(all_tables,
                #zero=None,
                #error_message=T("Undefined Table"))))),
                TD(acl_widget("uacl", "new_t_uacl", auth.permission.NONE)),
                TD(acl_widget("oacl", "new_t_oacl", auth.permission.NONE)),
                TD(new_acl), _class=_class))

            # Tabs
            tabs = [SPAN(A(CACL, _class="cacl-tab"),
                         _class="tab_other")]
            if auth.permission.use_facls:
                tabs.append(SPAN(A(FACL, _class="facl-tab"),
                                 _class="tab_other"))
            tabs.append(SPAN(A(TACL), _class="tab_here"))
            acl_forms.append(DIV(DIV(tabs, _class="tabs"),
                                 TABLE(thead, TBODY(form_rows)),
                                 _id="table-acls"))

        # Aggregate ACL Form ----------------------------------------------
        acl_form = DIV(acl_forms, _id="table-container")

        # Action row
        if session.s3.cancel:
            cancel = session.s3.cancel
        else:
            cancel = URL(c="admin", f="role",
                         vars=request.get_vars)
        action_row = DIV(INPUT(_type="submit", _value=T("Save")),
                         A(CANCEL, _href=cancel, _class="action-lnk"),
                         _id="action-row")

        # Complete form
        form = FORM(role_form, acl_form, action_row)

        # Append role_id
        if role_id:
            form.append(INPUT(_type="hidden",
                              _name="role_id",
                              value=role_id))

        # Process the form ------------------------------------------------
        if form.accepts(request.post_vars, session):
            vars = form.vars

            # Update the role
            role = Storage(role=vars.role_name, description=vars.role_desc)
            if r.record:
                r.record.update_record(**role)
                role_id = form.vars.role_id
                session.confirmation = '%s "%s" %s' % (T("Role"),
                                                       role.role,
                                                       T("updated"))
            else:
                import uuid
                role.uuid = uuid.uuid4()
                role_id = self.table.insert(**role)
                session.confirmation = '%s "%s" %s' % (T("Role"),
                                                       role.role,
                                                       T("created"))

            if role_id:
                # Collect the ACLs from the widget names
                # (acl_[uo]_<id>_<controller>_<function>_<tablename>)
                acls = Storage()
                for v in vars:
                    if v[:4] == "acl_":
                        acl_type, name = v[4:].split("_", 1)
                        n = name.split("_", 3)
                        i, c, f, t = map(lambda item: \
                                         item != any and item or None, n)
                        if i.isdigit():
                            i = int(i)
                        else:
                            i = None
                        name = "%s_%s_%s" % (c, f, t)
                        if name not in acls:
                            acls[name] = Storage()
                        acls[name].update({"id": i,
                                           "group_id": role_id,
                                           "controller": c,
                                           "function": f,
                                           "tablename": t,
                                           "%sacl" % acl_type: vars[v]})
                # Newly added controller/table ACLs
                for v in ("new_controller", "new_table"):
                    if v in vars and vars[v]:
                        c = v == "new_controller" and \
                            vars.new_controller or None
                        f = v == "new_controller" and \
                            vars.new_function or None
                        t = v == "new_table" and vars.new_table or None
                        name = "%s_%s_%s" % (c, f, t)
                        x = v == "new_table" and "t" or "c"
                        uacl = vars["new_%s_uacl" % x]
                        oacl = vars["new_%s_oacl" % x]
                        if name not in acls:
                            acls[name] = Storage()
                        acls[name].update(group_id=role_id,
                                          controller=c,
                                          function=f,
                                          tablename=t,
                                          oacl=oacl,
                                          uacl=uacl)

                # Save the ACLs
                for acl in acls.values():
                    _id = acl.pop("id", None)
                    if _id:
                        query = (acl_table.deleted != True) & \
                                (acl_table.id == _id)
                        db(query).update(**acl)
                    elif acl.oacl or acl.uacl:
                        _id = acl_table.insert(**acl)

            redirect(URL(f="role", vars=request.get_vars))

        output.update(form=form)
        if form.errors:
            # Re-open the tab which contains the error
            if "new_table" in form.errors:
                output.update(acl="table")
            elif "new_controller" in form.errors:
                output.update(acl="function")
        current.response.view = "admin/role_edit.html"

    else:
        # Fix: BAD_FORMAT lives on manager.ERROR (cf. apply_method/_list),
        # manager.BAD_FORMAT would raise AttributeError
        r.error(501, manager.ERROR.BAD_FORMAT)

    return output
# -------------------------------------------------------------------------
    def _delete(self, r, **attr):
        """
        Delete a role (auth_group) and everything that references it.

        @param r: the S3Request (must be interactive and carry the
                  role record to delete)
        @param attr: request attributes (unused here)

        Protected/system roles are refused. All deletions are soft
        deletes (deleted=True), matching the rest of the framework.
        Always ends in a redirect back to the role list.
        """
        session = current.session
        manager = current.manager
        request = self.request
        T = current.T
        auth = manager.auth
        if r.interactive:
            if r.record:
                role = r.record
                role_id = role.id
                role_name = role.role
                # Hard-wired and flagged roles may never be deleted
                if role_id in self.PROTECTED_ROLES or \
                   role.protected or role.system:
                    session.error = '%s "%s" %s' % (T("Role"),
                                                    role_name,
                                                    T("cannot be deleted."))
                    redirect(URL(c="admin", f="role",
                                 vars=request.get_vars))
                else:
                    db = current.db
                    # Delete all ACLs for this role:
                    acl_table = auth.permission.table
                    query = (acl_table.deleted != True) & \
                            (acl_table.group_id == role_id)
                    db(query).update(deleted=True)
                    # Remove all memberships:
                    membership_table = db.auth_membership
                    query = (membership_table.deleted != True) & \
                            (membership_table.group_id == role_id)
                    db(query).update(deleted=True)
                    # Update roles in session (drop the deleted role
                    # from the current user's active role list):
                    session.s3.roles = [role
                                        for role in session.s3.roles
                                        if role != role_id]
                    # Remove role (role name is cleared so the name can
                    # be re-used by a new role):
                    query = (self.table.deleted != True) & \
                            (self.table.id == role_id)
                    db(query).update(role=None,
                                     deleted=True)
                    # Confirmation:
                    session.confirmation = '%s "%s" %s' % (T("Role"),
                                                           role_name,
                                                           T("deleted"))
            else:
                session.error = T("No role to delete")
        else:
            r.error(501, manager.BAD_FORMAT)
        redirect(URL(c="admin", f="role", vars=request.get_vars))
# -------------------------------------------------------------------------
    def _roles(self, r, **attr):
        """
        View/update the role memberships of a user.

        @param r: the S3Request (must be interactive and carry a user
                  record)
        @param attr: request attributes (unused here)
        @return: dict for the view (title + membership form)

        Renders a checkbox form of all visible roles (ANONYMOUS and
        AUTHENTICATED are always excluded) and synchronises the
        auth_membership table with the submitted selection.
        """
        output = dict()
        db = current.db
        T = current.T
        CANCEL = T("Cancel")
        session = current.session
        manager = current.manager
        sr = session.s3.system_roles
        request = self.request
        crud_settings = manager.s3.crud
        # NOTE(review): formstyle is looked up but never used in this
        # method
        formstyle = crud_settings.formstyle
        auth = manager.auth
        gtable = auth.settings.table_group
        mtable = auth.settings.table_membership
        if r.interactive:
            if r.record:
                user = r.record
                user_id = user.id
                username = user.email
                # Current memberships of this user: {group_id: membership_id}
                query = (mtable.deleted != True) &\
                        (mtable.user_id == user_id)
                memberships = db(query).select()
                memberships = Storage([(str(m.group_id), m.id)
                                       for m in memberships])
                # All selectable roles (hidden + system login roles excluded)
                roles = db(gtable.deleted != True).select(gtable.ALL)
                roles = Storage([(str(g.id), " %s" % g.role)
                                 for g in roles
                                 if g.hidden != True and \
                                    g.id not in (sr.ANONYMOUS,
                                                 sr.AUTHENTICATED)])
                field = Storage(name="roles",
                                requires = IS_IN_SET(roles, multiple=True))
                widget = CheckboxesWidgetS3.widget(field, memberships.keys())
                if session.s3.cancel:
                    cancel = session.s3.cancel
                else:
                    cancel = r.url(method="")
                form = FORM(TABLE(
                            TR(TD(widget)),
                            TR(TD(INPUT(_type="submit", _value=T("Save")),
                                  A(CANCEL,
                                    _href=cancel, _class="action-lnk")))))
                if form.accepts(request.post_vars, session):
                    assign = form.vars.roles
                    # Sync memberships: soft-delete deselected roles,
                    # insert newly selected ones
                    for role in roles:
                        query = (mtable.deleted != True) & \
                                (mtable.user_id == user_id) & \
                                (mtable.group_id == role)
                        _set = db(query)
                        if str(role) not in assign:
                            _set.update(deleted=True)
                        else:
                            membership = _set.select(limitby=(0, 1)).first()
                            if not membership:
                                mtable.insert(user_id=user_id, group_id=role)
                    session.confirmation = T("User Updated")
                    redirect(r.url(method=""))
                output.update(title="%s - %s" %
                                    (T("Assigned Roles"), username),
                              form=form)
                current.response.view = "admin/user_roles.html"
            else:
                session.error = T("No user to update")
                redirect(r.url(method=""))
        else:
            r.error(501, manager.BAD_FORMAT)
        return output
# -------------------------------------------------------------------------
    def _users(self, r, **attr):
        """
        View/update the users assigned to a role.

        @param r: the S3Request (must be interactive and carry a role
                  record)
        @param attr: request attributes (unused here)
        @return: dict for the view (title, forms, action buttons)

        Builds two independent forms: a table of currently-assigned
        users with remove checkboxes ("del_form") and a dropdown to add
        one unassigned user ("add_form"). Each submission redirects
        back to the same page.
        """
        output = dict()
        session = current.session
        manager = current.manager
        request = self.request
        db = current.db
        T = current.T
        auth = manager.auth
        utable = auth.settings.table_user
        gtable = auth.settings.table_group
        mtable = auth.settings.table_membership
        if r.interactive:
            if r.record:
                role_id = r.record.id
                role_name = r.record.role
                role_desc = r.record.description
                title = "%s: %s" % (T("Role"), role_name)
                output.update(title=title,
                              description=role_desc,
                              group=role_id)
                # Display field depends on deployment login style
                if auth.settings.username:
                    username = "username"
                else:
                    username = "email"
                # @todo: Audit
                users = db().select(utable.ALL)
                query = (mtable.deleted != True) & \
                        (mtable.group_id == role_id)
                assigned = db(query).select(mtable.ALL)
                # Split the user list into members / non-members
                assigned_users = [row.user_id for row in assigned]
                unassigned_users = [(row.id, row)
                                    for row in users
                                    if row.id not in assigned_users]
                # Delete form
                if assigned_users:
                    thead = THEAD(TR(TH(),
                                     TH(T("Name")),
                                     TH(T("Username")),
                                     TH(T("Remove?"))))
                    trows = []
                    i = 0
                    for user in users:
                        if user.id not in assigned_users:
                            continue
                        # Alternate row styling for the dataTable
                        _class = i % 2 and "even" or "odd"
                        i += 1
                        trow = TR(TD(A(), _name="Id"),
                                  TD("%s %s" % (user.first_name,
                                                user.last_name)),
                                  TD(user[username]),
                                  TD(INPUT(_type="checkbox",
                                           _name="d_%s" % user.id,
                                           _class="remove_item")),
                                  _class=_class)
                        trows.append(trow)
                    trows.append(TR(TD(), TD(), TD(),
                                    TD(INPUT(_id="submit_delete_button",
                                             _type="submit",
                                             _value=T("Remove")))))
                    tbody = TBODY(trows)
                    del_form = TABLE(thead, tbody, _id="list",
                                     _class="dataTable display")
                else:
                    del_form = T("No users with this role")
                del_form = FORM(DIV(del_form, _id="table-container"),
                                _name="del_form")
                # Add form
                uname = lambda u: \
                        "%s: %s %s" % (u.id, u.first_name, u.last_name)
                u_opts = [OPTION(uname(u[1]),
                                 _value=u[0]) for u in unassigned_users]
                if u_opts:
                    # Empty pre-selected option forces an explicit choice
                    u_opts = [OPTION("",
                                     _value=None, _selected="selected")] + u_opts
                    u_select = DIV(TABLE(TR(
                                    TD(SELECT(_name="new_user", *u_opts)),
                                    TD(INPUT(_type="submit",
                                             _id="submit_add_button",
                                             _value=T("Add"))))))
                else:
                    u_select = T("No further users can be added")
                add_form = FORM(DIV(u_select), _name="add_form")
                # Process delete form (checkbox names are "d_<user_id>")
                if del_form.accepts(request.post_vars,
                                    session, formname="del_form"):
                    del_ids = [v[2:] for v in del_form.vars
                                     if v[:2] == "d_" and
                                        del_form.vars[v] == "on"]
                    query = (mtable.deleted != True) & \
                            (mtable.group_id == role_id) & \
                            (mtable.user_id.belongs(del_ids))
                    db(query).update(deleted=True)
                    redirect(r.url())
                # Process add form
                if add_form.accepts(request.post_vars,
                                    session, formname="add_form"):
                    if add_form.vars.new_user:
                        mtable.insert(group_id=role_id,
                                      user_id=add_form.vars.new_user)
                    redirect(r.url())
                form = DIV(H4(T("Users with this role")), del_form,
                           H4(T("Add new users")), add_form)
                list_btn = A(T("Back to Roles List"),
                             _href=URL(c="admin", f="role"),
                             _class="action-btn")
                edit_btn = A(T("Edit Role"),
                             _href=URL(c="admin", f="role",
                                       args=[role_id]),
                             _class="action-btn")
                output.update(form=form, list_btn=list_btn, edit_btn=edit_btn)
                current.response.view = "admin/role_users.html"
            else:
                session.error = T("No role to update")
                redirect(r.there())
        else:
            r.error(501, manager.BAD_FORMAT)
        return output
# =============================================================================
class FaceBookAccount(OAuthAccount):
    """
    OAuth implementation for Facebook, built on web2py's OAuthAccount.

    Configuration (app id/secret) is read from
    current.auth.settings.facebook; user data is fetched through the
    Facebook Graph API.
    """
    AUTH_URL = "https://graph.facebook.com/oauth/authorize"
    TOKEN_URL = "https://graph.facebook.com/oauth/access_token"
    # -------------------------------------------------------------------------
    def __init__(self):
        # Imported lazily so the module loads even without the
        # third-party "facebook" package installed
        from facebook import GraphAPI, GraphAPIError
        self.GraphAPI = GraphAPI
        self.GraphAPIError = GraphAPIError
        g = dict(GraphAPI=GraphAPI,
                 GraphAPIError=GraphAPIError,
                 request=current.request,
                 response=current.response,
                 session=current.session,
                 HTTP=HTTP)
        client = current.auth.settings.facebook
        OAuthAccount.__init__(self, g, client["id"], client["secret"],
                              self.AUTH_URL, self.TOKEN_URL,
                              scope="email,user_about_me,user_location,user_photos,user_relationships,user_birthday,user_website,create_event,user_events,publish_stream")
        # Graph API client, created lazily in get_user()
        self.graph = None
    # -------------------------------------------------------------------------
    def login_url(self, next="/"):
        """ Overriding to produce a different redirect_uri """
        request = current.request
        session = current.session
        if not self.accessToken():
            if not request.vars.code:
                # No auth code yet: send the browser to Facebook's
                # authorization endpoint (HTTP 307 redirect)
                session.redirect_uri = "%s/%s/default/facebook/login" % \
                    (current.deployment_settings.get_base_public_url(),
                     request.application)
                data = dict(redirect_uri=session.redirect_uri,
                            response_type="code",
                            client_id=self.client_id)
                if self.args:
                    data.update(self.args)
                auth_request_url = self.auth_url + "?" + urlencode(data)
                raise HTTP(307,
                           "You are not authenticated: you are being redirected to the <a href='" + auth_request_url + "'> authentication server</a>",
                           Location=auth_request_url)
            else:
                # Came back from Facebook: exchange the code for a token
                session.code = request.vars.code
                self.accessToken()
                #return session.code
        return next
    # -------------------------------------------------------------------------
    def get_user(self):
        """ Returns the user using the Graph API. """
        db = current.db
        auth = current.auth
        session = current.session
        if not self.accessToken():
            return None
        if not self.graph:
            self.graph = self.GraphAPI((self.accessToken()))
        user = None
        try:
            # NOTE(review): "get_object_c" is not a standard GraphAPI
            # method name (the stock facebook-sdk exposes get_object) --
            # presumably a local fork; verify against the bundled
            # facebook module
            user = self.graph.get_object_c("me")
        except self.GraphAPIError:
            # NOTE(review): self.session is not set in __init__ --
            # presumably inherited from OAuthAccount; confirm, otherwise
            # this line would raise AttributeError
            self.session.token = None
            self.graph = None
        if user:
            # Check if a user with this email has already registered
            #session.facebooklogin = True
            table = auth.settings.table_user
            query = (table.email == user["email"])
            existent = db(query).select(table.id,
                                        table.password,
                                        limitby=(0, 1)).first()
            if existent:
                #session["%s_setpassword" % existent.id] = existent.password
                # Known user: return credentials matching the stored record
                _user = dict(first_name = user.get("first_name", ""),
                             last_name = user.get("last_name", ""),
                             facebookid = user["id"],
                             facebook = user.get("username", user["id"]),
                             email = user["email"],
                             password = existent.password
                             )
                return _user
            else:
                # b = user["birthday"]
                # birthday = "%s-%s-%s" % (b[-4:], b[0:2], b[-7:-5])
                # if 'location' in user:
                #     session.flocation = user['location']
                #session["is_new_from"] = "facebook"
                # New user: send welcome mail and build a registration dict
                auth.s3_send_welcome_email(user)
                # auth.initial_user_permission(user)  # Called on profile page
                _user = dict(first_name = user.get("first_name", ""),
                             last_name = user.get("last_name", ""),
                             facebookid = user["id"],
                             facebook = user.get("username", user["id"]),
                             nickname = IS_SLUG()(user.get("username", "%(first_name)s-%(last_name)s" % user) + "-" + user['id'][:5])[0],
                             email = user["email"],
                             # birthdate = birthday,
                             about = user.get("bio", ""),
                             website = user.get("website", ""),
                             # gender = user.get("gender", "Not specified").title(),
                             photo_source = 3,
                             tagline = user.get("link", ""),
                             registration_type = 2,
                             )
                return _user
# =============================================================================
class GooglePlusAccount(OAuthAccount):
    """
    OAuth implementation for Google
    https://code.google.com/apis/console/

    Credentials are read from current.auth.settings.google; the user
    profile is fetched from Google's OAuth2 userinfo endpoint.
    """
    AUTH_URL = "https://accounts.google.com/o/oauth2/auth"
    TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
    API_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
    # -------------------------------------------------------------------------
    def __init__(self):
        request = current.request
        settings = current.deployment_settings
        g = dict(request=request,
                 response=current.response,
                 session=current.session,
                 HTTP=HTTP)
        client = current.auth.settings.google
        # NOTE: unlike FaceBookAccount, this class does not call
        # OAuthAccount.__init__ but sets up the attributes directly
        self.globals = g
        self.client = client
        self.client_id = client["id"]
        self.client_secret = client["secret"]
        self.auth_url = self.AUTH_URL
        # Extra parameters sent with the authorization request
        self.args = dict(
                scope = "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile",
                user_agent = "google-api-client-python-plus-cmdline/1.0",
                xoauth_displayname = settings.get_system_name(),
                response_type = "code",
                redirect_uri = "%s/%s/default/google/login" % \
                    (settings.get_base_public_url(),
                     request.application),
                approval_prompt = "force",
                state = "google"
            )
        self.graph = None
    # -------------------------------------------------------------------------
    def __build_url_opener(self, uri):
        """
        Build the url opener for managing HTTP Basic Athentication
        """
        # Create an OpenerDirector with support
        # for Basic HTTP Authentication...
        auth_handler = urllib2.HTTPBasicAuthHandler()
        auth_handler.add_password(None,
                                  uri,
                                  self.client_id,
                                  self.client_secret)
        opener = urllib2.build_opener(auth_handler)
        return opener
    # -------------------------------------------------------------------------
    def accessToken(self):
        """
        Return the access token generated by the authenticating server.
        If token is already in the session that one will be used.
        Otherwise the token is fetched from the auth server.
        """
        session = current.session
        # dict.has_key() is Python-2-only syntax (this module targets
        # Python 2)
        if session.token and session.token.has_key("expires"):
            expires = session.token["expires"]
            # reuse token until expiration
            if expires == 0 or expires > time.time():
                return session.token["access_token"]
        if session.code:
            # Exchange the one-time auth code for an access token
            data = dict(client_id = self.client_id,
                        client_secret = self.client_secret,
                        redirect_uri = self.args["redirect_uri"],
                        code = session.code,
                        grant_type = "authorization_code",
                        scope = "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile")
            # if self.args:
            #     data.update(self.args)
            open_url = None
            opener = self.__build_url_opener(self.TOKEN_URL)
            try:
                open_url = opener.open(self.TOKEN_URL, urlencode(data))
            except urllib2.HTTPError, e:
                raise Exception(e.read())
            finally:
                del session.code # throw it away
            if open_url:
                try:
                    session.token = json.loads(open_url.read())
                    # Convert relative "expires_in" to an absolute epoch time
                    session.token["expires"] = int(session.token["expires_in"]) + \
                        time.time()
                finally:
                    opener.close()
                return session.token["access_token"]
        session.token = None
        return None
    # -------------------------------------------------------------------------
    def login_url(self, next="/"):
        """ Overriding to produce a different redirect_uri """
        request = current.request
        session = current.session
        if not self.accessToken():
            if not request.vars.code:
                # No auth code yet: redirect the browser to Google's
                # authorization endpoint (HTTP 307)
                session.redirect_uri = self.args["redirect_uri"]
                data = dict(redirect_uri=session.redirect_uri,
                            response_type="code",
                            client_id=self.client_id)
                if self.args:
                    data.update(self.args)
                auth_request_url = self.auth_url + "?" + urlencode(data)
                raise HTTP(307,
                           "You are not authenticated: you are being redirected to the <a href='" + auth_request_url + "'> authentication server</a>",
                           Location=auth_request_url)
            else:
                # Returned from Google: trade the code for a token
                session.code = request.vars.code
                self.accessToken()
                #return session.code
        return next
    # -------------------------------------------------------------------------
    def get_user(self):
        """ Returns the user using the Graph API. """
        db = current.db
        auth = current.auth
        session = current.session
        if not self.accessToken():
            return None
        user = None
        try:
            user = self.call_api()
        except Exception, e:
            # NOTE(review): debugging print left in; Python-2-only
            # print-statement syntax
            print str(e)
            session.token = None
        if user:
            # Check if a user with this email has already registered
            #session.googlelogin = True
            table = auth.settings.table_user
            query = (table.email == user["email"])
            existent = db(query).select(table.id,
                                        table.password,
                                        limitby=(0, 1)).first()
            if existent:
                #session["%s_setpassword" % existent.id] = existent.password
                # Known user: return credentials matching the stored record
                _user = dict(
                            #first_name = user.get("given_name", user["name"]),
                            #last_name = user.get("family_name", user["name"]),
                            googleid = user["id"],
                            email = user["email"],
                            password = existent.password
                            )
                return _user
            else:
                # b = user["birthday"]
                # birthday = "%s-%s-%s" % (b[-4:], b[0:2], b[-7:-5])
                # if "location" in user:
                #     session.flocation = user["location"]
                #session["is_new_from"] = "google"
                # New user: send welcome mail and build a registration dict
                auth.s3_send_welcome_email(user)
                _user = dict(
                            first_name = user.get("given_name", user["name"].split()[0]),
                            last_name = user.get("family_name", user["name"].split()[-1]),
                            googleid = user["id"],
                            nickname = "%(first_name)s-%(last_name)s-%(id)s" % dict(first_name=user["name"].split()[0].lower(), last_name=user["name"].split()[-1].lower(), id=user['id'][:5]),
                            email = user["email"],
                            # birthdate = birthday,
                            website = user.get("link", ""),
                            # gender = user.get("gender", "Not specified").title(),
                            photo_source = 6 if user.get("picture", None) else 2,
                            googlepicture = user.get("picture", ""),
                            registration_type = 3,
                            )
                return _user
    # -------------------------------------------------------------------------
    def call_api(self):
        """
        Fetch the user's profile from the OAuth2 userinfo endpoint.
        Returns the decoded JSON dict, or None (clearing the token).
        """
        api_return = urllib.urlopen("https://www.googleapis.com/oauth2/v1/userinfo?access_token=%s" % self.accessToken())
        user = json.loads(api_return.read())
        if user:
            return user
        else:
            # NOTE(review): self.session is not set in __init__ --
            # presumably provided by OAuthAccount; confirm
            self.session.token = None
            return None
# END =========================================================================
|
flavour/iscram
|
modules/s3/s3aaa.py
|
Python
|
mit
| 198,273
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    """Render the todo application's landing page."""
    template_name = 'todo/index.html'
    return render(request, template_name)
|
deshmukhmayur/django-todo
|
todo/views.py
|
Python
|
mit
| 166
|
from stard.services import BaseService
class Service(BaseService):
    """Test-sample service ("father") that depends on one child service."""
    def init_service(self):
        # Declare the single dependency: the 'child' service
        self.children = {self.service('child')}
|
DexterLB/stard
|
src/stard/test_samples/father.py
|
Python
|
mit
| 144
|
#!/usr/bin/python
import sys
import logging
import time
import subprocess
import shlex
import os
import StringIO
import tempfile
from watchdog.observers import Observer
from watchdog.events import FileCreatedEvent, RegexMatchingEventHandler
class CreatedFilesHandler(RegexMatchingEventHandler):
    """
    Watchdog handler: whenever a new file matching the configured
    regexes is created (FileCreatedEvent), run the Matlab processing
    function on it.

    Results are written by Matlab into a sibling OUTPUT/<file stem>
    directory; on failure, error.txt and error.log (Matlab's captured
    output) are written there instead.
    """
    def on_created(self, event):
        if not isinstance(event, FileCreatedEvent):
            return
        src_path = event.src_path
        logging.info('Detected a new KML file creation... {}'.format(event.src_path))
        logging.info('Executing Matlab.')
        # OUTPUT dir lives next to the INPUT dir: <parent>/OUTPUT/<stem>
        file_name_no_ext = '.'.join(os.path.basename(src_path).split('.')[:-1])
        output_dir = os.path.join(os.path.dirname(os.path.dirname(src_path)), 'OUTPUT', file_name_no_ext)
        try:
            os.mkdir(output_dir)
            logging.info('Directory created.')
        except OSError:
            pass  # Directory is already created
        command = "/Applications/MATLAB_R2014b.app/bin/matlab -nodisplay -nosplash -r \"try, IC_GEPlugFunction('{}', '{}'); exit(0); catch, err = lasterror; disp(err); disp(err.message); disp(err.stack); disp(err.identifier); exit(1); end;\"".format(
            src_path, output_dir)
        args = shlex.split(command)
        # Capture Matlab's stdout/stderr in a temp file so it can be
        # copied into error.log if the run fails.
        (out_fd, out_path) = tempfile.mkstemp()
        try:
            try:
                subprocess.check_call(args, stdout=out_fd, stderr=subprocess.STDOUT)
                logging.info('Command successful, continuing.')
            except subprocess.CalledProcessError:
                logging.error('Command failed, writing error logs.')
                with open(os.path.join(output_dir, 'error.txt'), 'w') as ferr:
                    ferr.write("Oh Snap! There was an error in your input, please fix it and try again.\n")
                with open(os.path.join(output_dir, 'error.log'), 'w') as flog:
                    readable_file = os.fdopen(out_fd, 'r')
                    out_fd = None  # fd ownership transferred to the file object
                    try:
                        readable_file.seek(0)
                        flog.write(readable_file.read())
                    finally:
                        readable_file.close()
        finally:
            # Bug fix: the original leaked the mkstemp() fd on the
            # success path (it was only closed, via os.fdopen().close(),
            # in the error branch). Always close it here.
            if out_fd is not None:
                os.close(out_fd)
            os.remove(out_path)
        logging.info('Command executed, continuing the watch.')
def main():
    """Watch the given directory (argv[1], default '.') for new KML files."""
    watch_root = sys.argv[1] if len(sys.argv) > 1 else '.'
    logging.basicConfig(level=logging.INFO)
    handler = CreatedFilesHandler(regexes=[r'.*/INPUT/.*\.kml$'], ignore_directories=True)
    watcher = Observer()
    watcher.schedule(handler, watch_root, recursive=True)
    watcher.start()
    # Idle until interrupted; the observer thread does all the work.
    try:
        while True:
            time.sleep(5)
    except KeyboardInterrupt:
        watcher.stop()
    watcher.join()

if __name__ == '__main__':
    sys.exit(main())
|
hydrosolutions/irrigation-calc
|
file_watch.py
|
Python
|
mit
| 2,693
|
##############################
# #
# Instructions #
# #
##############################
# To run, use the following command:
# $ python seating.py <input_file>
# where <input_file> is the filename with the question's input
import sys
import re
# Check to make sure correct number of arguments supplied
if (len(sys.argv) != 2):
    print('Invalid number of arguments!')
    sys.exit()
# Read the input from the file provided as argument
input_file = open(sys.argv[1])
puzzle_input = input_file.readlines()
input_file.close()
# Regular expression to get the names and happiness changes of each pair
regex_happiness = re.compile(r'(\w+) would (gain|lose) (\d+) happiness units by sitting next to (\w+).')
# happiness[A][B] = happiness change for A when seated next to B
# (note: not symmetric -- A's gain next to B differs from B's next to A)
happiness = {}
# Total happiness of every complete seating arrangement tried
possibilities = []
# For every line in input
for line in puzzle_input:
    info = re.match(regex_happiness, line)
    # Check if the person is gaining or losing happiness
    mult = 1
    if info.group(2) == 'lose':
        mult = -1
    # Add the person and their neighbor as an entry in the dict
    if info.group(1) in happiness:
        happiness[info.group(1)][info.group(4)] = mult * int(info.group(3))
    else:
        happiness[info.group(1)] = {info.group(4): mult * int(info.group(3))}
# Adding myself to the table (neutral: 0 happiness in both directions
# with everyone)
happiness['Joseph'] = {}
for person in happiness:
    if not person == 'Joseph':
        happiness[person]['Joseph'] = 0
        happiness['Joseph'][person] = 0
# Finds all the possibilities from a person to neighbors which have not been tried so far
# and adds the total change in happiness together
def calc_possibilities(first_person, person, visited, total_so_far):
    """
    Recursively extend a seating arrangement around the table.

    first_person -- the person the circle started with (used to close it)
    person       -- the person just seated
    visited      -- ordered list of everyone seated so far
    total_so_far -- accumulated happiness of the adjacent pairs so far

    Reads the module-level `happiness` table and appends the total of
    each complete arrangement to the module-level `possibilities` list.
    """
    global happiness
    global possibilities
    # (The original also declared `global best_so_far`, a name that is
    # never defined or used anywhere in this script -- removed.)
    # Make a copy of the list and add a new entry, so the caller's list
    # is not mutated by this branch of the recursion
    visited = visited[:]
    visited.append(person)
    # If all of the people are seated, close the circle back to the
    # first person and record the arrangement's total happiness
    if len(visited) == len(happiness):
        total_so_far += happiness[first_person][person] + happiness[person][first_person]
        possibilities.append(total_so_far)
    # For each person the current person could sit beside
    for neighbor in happiness[person]:
        # If they're already seated, skip them
        if neighbor in visited:
            continue
        # Recurse with the pair's mutual happiness added in
        calc_possibilities(first_person, neighbor, visited, total_so_far + happiness[neighbor][person] + happiness[person][neighbor])
# Start with each person and go around the table, trying every combination
# (each circular arrangement is generated multiple times -- from every
# starting pair -- but max() makes the duplicates harmless)
for person in happiness:
    for neighbor in happiness[person]:
        calc_possibilities(person, neighbor, [person], happiness[person][neighbor] + happiness[neighbor][person])
# Print the overall best possibility
print('The best seating arrangement has a combined happiness of', max(possibilities))
|
joseph-roque/advent-of-code
|
day_13/seatmyself.py
|
Python
|
mit
| 2,768
|
from datetime import date, time

# Sentinel/default values used by the datasource parsers when a piece of
# article metadata is missing or could not be extracted. The string
# sentinels are deliberately unusual so they stand out in extracted data.
NO_TITLE = u"__NO_TITLE__"
NO_AUTHOR_NAME = 'None'
NO_CATEGORY_NAME = 'None'
NON_EXISTENT_ARTICLE_TITLE = 'NON_EXISTENT'
# Unix epoch as the "unknown date" placeholder.
# Bug fix: the original read date(1970, 01, 01) -- leading-zero integer
# literals are a SyntaxError on Python 3 (and octal-style on Python 2);
# plain 1s have the identical value.
NO_DATE = date(1970, 1, 1)
NO_TIME = time(0, 0)
NO_URL = u"__NO_URL__"
UNFINISHED_TAG = u"unfinished"
GHOST_LINK_TAG = u"ghost link"
GHOST_LINK_TITLE = u"__GHOST_LINK__"
GHOST_LINK_URL = u"__GHOST_LINK__"
PAYWALLED_CONTENT = u"__PAYWALLED__"
RENDERED_STORIFY_TITLE = u"__RENDERED_STORIFY__"
RENDERED_TWEET_TITLE = u"__RENDERED_TWEET__"
EMBEDDED_VIDEO_TITLE = u"__EMBEDDED_VIDEO_TITLE__"
EMBEDDED_VIDEO_URL = u"__EMBEDDED_VIDEO_URL__"
|
sevas/csxj-crawler
|
csxj/datasources/parser_tools/constants.py
|
Python
|
mit
| 590
|
#
# (c) Dave Kirby 2001 - 2005
# mock@thedeveloperscoach.com
#
# Original call interceptor and call assertion code by Phil Dawes (pdawes@users.sourceforge.net)
# Call interceptor code enhanced by Bruce Cropley (cropleyb@yahoo.com.au)
#
# This Python module and associated files are released under the FreeBSD
# license. Essentially, you can do what you like with it except pretend you wrote
# it yourself.
#
#
# Copyright (c) 2005, Dave Kirby
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of this library nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# mock@thedeveloperscoach.com
"""
Mock object library for Python. Mock objects can be used when unit testing
to remove a dependency on another production class. They are typically used
when the dependency would either pull in lots of other classes, or
significantly slow down the execution of the test.
They are also used to create exceptional conditions that cannot otherwise
be easily triggered in the class under test.
"""
__version__ = "0.1.0"
# Added in Python 2.1
import inspect
import re
class MockInterfaceError(Exception):
    """Raised when a mock call does not match the mocked class's real interface."""
    pass
class Mock(object):
    """
    The Mock class emulates any other class for testing purposes.
    All method calls are stored for later examination.

    NOTE: this module targets Python 2 (dict.has_key, inspect.getargspec,
    inspect.ismethod on unbound methods).
    """
    def __init__(self, returnValues=None, realClass=None):
        """
        The Mock class constructor takes a dictionary of method names and
        the values they return. Methods that are not in the returnValues
        dictionary will return None.
        You may also supply a class whose interface is being mocked.
        All calls will be checked to see if they appear in the original
        interface. Any calls to methods not appearing in the real class
        will raise a MockInterfaceError. Any calls that would fail due to
        non-matching parameter lists will also raise a MockInterfaceError.
        Both of these help to prevent the Mock class getting out of sync
        with the class it is Mocking.
        """
        # {method name: [MockCall, ...]} in per-method call order
        self.mockCalledMethods = {}
        # [MockCall, ...] in global call order
        self.mockAllCalledMethods = []
        self.mockReturnValues = returnValues or {}
        # {method name: [(testFn, after, until), ...]}
        self.mockExpectations = {}
        self.realClassMethods = None
        if realClass:
            self.realClassMethods = dict(inspect.getmembers(realClass, inspect.isroutine))
            # Reject return values for methods the real class lacks
            for retMethod in self.mockReturnValues.keys():
                if not self.realClassMethods.has_key(retMethod):
                    raise MockInterfaceError("Return value supplied for method '%s' that was not in the original class" % retMethod)
        self._setupSubclassMethodInterceptors()
    def _setupSubclassMethodInterceptors(self):
        # Wrap every method defined by a subclass in a recording
        # MockCallable, so handcrafted mock methods are also tracked
        methods = inspect.getmembers(self.__class__,inspect.isroutine)
        baseMethods = dict(inspect.getmembers(Mock, inspect.ismethod))
        for m in methods:
            name = m[0]
            # Don't record calls to methods of Mock base class.
            if not name in baseMethods:
                self.__dict__[name] = MockCallable(name, self, handcrafted=True)
    def __getattr__(self, name):
        # Any unknown attribute becomes a recording callable
        return MockCallable(name, self)
    def mockAddReturnValues(self, **methodReturnValues ):
        # Merge additional canned return values into the existing map
        self.mockReturnValues.update(methodReturnValues)
    def mockSetExpectation(self, name, testFn, after=0, until=0):
        # testFn(mockObj, thisCall, idx) is asserted when the call count
        # for `name` is in the (after, until) window (until=0 = no limit)
        self.mockExpectations.setdefault(name, []).append((testFn,after,until))
    def _checkInterfaceCall(self, name, callParams, callKwParams):
        """
        Check that a call to a method of the given name to the original
        class with the given parameters would not fail. If it would fail,
        raise a MockInterfaceError.
        Based on the Python 2.3.3 Reference Manual section 5.3.4: Calls.
        """
        if self.realClassMethods == None:
            return
        if not self.realClassMethods.has_key(name):
            raise MockInterfaceError("Calling mock method '%s' that was not found in the original class" % name)
        func = self.realClassMethods[name]
        try:
            args, varargs, varkw, defaults = inspect.getargspec(func)
        except TypeError:
            # func is not a Python function. It is probably a builtin,
            # such as __repr__ or __coerce__. TODO: Checking?
            # For now assume params are OK.
            return
        # callParams doesn't include self; args does include self.
        numPosCallParams = 1 + len(callParams)
        if numPosCallParams > len(args) and not varargs:
            raise MockInterfaceError("Original %s() takes at most %s arguments (%s given)" %
                (name, len(args), numPosCallParams))
        # Get the number of positional arguments that appear in the call,
        # also check for duplicate parameters and unknown parameters
        numPosSeen = _getNumPosSeenAndCheck(numPosCallParams, callKwParams, args, varkw)
        lenArgsNoDefaults = len(args) - len(defaults or [])
        if numPosSeen < lenArgsNoDefaults:
            raise MockInterfaceError("Original %s() takes at least %s arguments (%s given)" % (name, lenArgsNoDefaults, numPosSeen))
    def mockGetAllCalls(self):
        """
        Return a list of MockCall objects,
        representing all the methods in the order they were called.
        """
        return self.mockAllCalledMethods
    getAllCalls = mockGetAllCalls # deprecated - kept for backward compatibility
    def mockGetNamedCalls(self, methodName):
        """
        Return a list of MockCall objects,
        representing all the calls to the named method in the order they were called.
        """
        return self.mockCalledMethods.get(methodName, [])
    getNamedCalls = mockGetNamedCalls # deprecated - kept for backward compatibility
    def mockCheckCall(self, index, name, *args, **kwargs):
        '''test that the index-th call had the specified name and parameters'''
        call = self.mockAllCalledMethods[index]
        assert name == call.getName(), "%r != %r" % (name, call.getName())
        call.checkArgs(*args, **kwargs)
def _getNumPosSeenAndCheck(numPosCallParams, callKwParams, args, varkw):
"""
Positional arguments can appear as call parameters either named as
a named (keyword) parameter, or just as a value to be matched by
position. Count the positional arguments that are given by either
keyword or position, and check for duplicate specifications.
Also check for arguments specified by keyword that do not appear
in the method's parameter list.
"""
posSeen = {}
for arg in args[:numPosCallParams]:
posSeen[arg] = True
for kwp in callKwParams:
if posSeen.has_key(kwp):
raise MockInterfaceError("%s appears as both a positional and named parameter." % kwp)
if kwp in args:
posSeen[kwp] = True
elif not varkw:
raise MockInterfaceError("Original method does not have a parameter '%s'" % kwp)
return len(posSeen)
class MockCall:
    """
    MockCall records the name and parameters of a call to an instance
    of a Mock class. Instances of MockCall are created by the Mock class,
    but can be inspected later as part of the test.
    """
    def __init__(self, name, params, kwparams ):
        # name:     the method name invoked
        # params:   tuple of positional arguments
        # kwparams: dict of keyword arguments
        self.name = name
        self.params = params
        self.kwparams = kwparams
    def checkArgs(self, *args, **kwargs):
        """Assert this call was made with exactly the given arguments."""
        assert args == self.params, "%r != %r" % (args, self.params)
        assert kwargs == self.kwparams, "%r != %r" % (kwargs, self.kwparams)
    def getParam( self, n ):
        """Return one argument, by position (int) or keyword (str)."""
        if isinstance(n, int):
            return self.params[n]
        elif isinstance(n, str):
            return self.kwparams[n]
        else:
            # Fixed: the original used `raise IndexError, '...'`, a
            # Python-2-only statement form; the call form below is
            # valid on Python 2.6+ and Python 3.
            raise IndexError('illegal index type for getParam')
    def getNumParams(self):
        """Number of positional arguments recorded."""
        return len(self.params)
    def getNumKwParams(self):
        """Number of keyword arguments recorded."""
        return len(self.kwparams)
    def getName(self):
        """Name of the called method."""
        return self.name
    #pretty-print the method call, e.g. "meth(1, 'x', a=2)"
    def __str__(self):
        s = self.name + "("
        sep = ''
        for p in self.params:
            s = s + sep + repr(p)
            sep = ', '
        # Fixed: the original did `items = self.kwparams.items();
        # items.sort()`, which breaks on Python 3 where items() returns
        # a view; sorted() behaves identically on Python 2.
        for k, v in sorted(self.kwparams.items()):
            s = s + sep + k + '=' + repr(v)
            sep = ', '
        s = s + ')'
        return s
    def __repr__(self):
        return self.__str__()
class MockCallable:
    """
    Intercepts the call and records it, then delegates to either the mock's
    dictionary of mock return values that was passed in to the constructor,
    or a handcrafted method of a Mock subclass.
    """
    def __init__(self, name, mock, handcrafted=False):
        # name:        method name being intercepted
        # mock:        the owning Mock instance
        # handcrafted: True when a real method exists on the Mock
        #              subclass and should be invoked after recording
        self.name = name
        self.mock = mock
        self.handcrafted = handcrafted
    def __call__(self,  *params, **kwparams):
        # Order matters: interface check, record, expectations, then
        # the actual (canned or handcrafted) call
        self.mock._checkInterfaceCall(self.name, params, kwparams)
        thisCall = self.recordCall(params,kwparams)
        self.checkExpectations(thisCall, params, kwparams)
        return self.makeCall(params, kwparams)
    def recordCall(self, params, kwparams):
        """
        Record the MockCall in an ordered list of all calls, and an ordered
        list of calls for that method name.
        """
        thisCall = MockCall(self.name, params, kwparams)
        calls = self.mock.mockCalledMethods.setdefault(self.name, [])
        calls.append(thisCall)
        self.mock.mockAllCalledMethods.append(thisCall)
        return thisCall
    def makeCall(self, params, kwparams):
        # Handcrafted: dispatch to the subclass-defined method found by
        # depth-first search of the class hierarchy
        if self.handcrafted:
            allPosParams = (self.mock,) + params
            func = _findFunc(self.mock.__class__, self.name)
            if not func:
                raise NotImplementedError
            return func(*allPosParams, **kwparams)
        else:
            # Canned: look up the configured return value; a
            # ReturnValuesBase yields its next value per call
            returnVal = self.mock.mockReturnValues.get(self.name)
            if isinstance(returnVal, ReturnValuesBase):
                returnVal = returnVal.next()
            return returnVal
    def checkExpectations(self, thisCall, params, kwparams):
        # Assert every expectation whose (after, until) call-count
        # window covers this call (until=0 means "no upper bound")
        if self.name in self.mock.mockExpectations:
            callsMade = len(self.mock.mockCalledMethods[self.name])
            for (expectation, after, until) in self.mock.mockExpectations[self.name]:
                if callsMade > after and (until==0 or callsMade < until):
                    assert expectation(self.mock, thisCall, len(self.mock.mockAllCalledMethods)-1), 'Expectation failed: '+str(thisCall)
def _findFunc(cl, name):
    """Depth-first search of *cl* and its bases for a function *name*.

    Returns the raw function object from the defining class's __dict__,
    or None when no class in the hierarchy defines it.
    """
    # 'in' replaces dict.has_key(), which was removed in Python 3 and
    # deprecated in Python 2; behavior is identical.
    if name in cl.__dict__:
        return cl.__dict__[name]
    for base in cl.__bases__:
        func = _findFunc(base, name)
        if func:
            return func
    return None
class ReturnValuesBase:
    """Iterator base for mock return values.

    Subclasses set ``self.iter``; each call yields the next configured
    value and exhaustion raises AssertionError to flag over-use.
    """
    def next(self):
        # The builtin next() works on Python 2.6+ and 3;
        # self.iter.next() was Python-2 only.
        try:
            return next(self.iter)
        except StopIteration:
            raise AssertionError("No more return values")
    # Python 3 iteration protocol alias; keeps .next() for old callers.
    __next__ = next
    def __iter__(self):
        return self
class ReturnValues(ReturnValuesBase):
    """Feed a mocked method the given values, one per call."""
    def __init__(self, *values):
        self.iter = iter(values)
class ReturnIterator(ReturnValuesBase):
    """Feed a mocked method successive values from an existing iterable."""
    def __init__(self, iterator):
        self.iter = iter(iterator)
def expectParams(*params, **keywords):
    '''check that the callObj is called with specified params and keywords
    '''
    def check(mockObj, callObj, idx):
        if callObj.params != params:
            return False
        return callObj.kwparams == keywords
    return check
def expectAfter(*methods):
    '''check that the function is only called after all the functions in 'methods'
    '''
    def check(mockObj, callObj, idx):
        # Names of every call so far, excluding the current (last) one.
        earlier = [call.getName() for call in mockObj.mockGetAllCalls()][:-1]
        return all(name in earlier for name in methods)
    return check
def expectException(exception, *args, **kwargs):
    ''' raise an exception when the method is called
    '''
    def check(mockObj, callObj, idx):
        # Build the exception lazily, at call time.
        raise exception(*args, **kwargs)
    return check
def expectParam(paramIdx, cond):
    '''check that the callObj is called with parameter specified by paramIdx (a position index or keyword)
    fulfills the condition specified by cond.
    cond is a function that takes a single argument, the value to test.
    '''
    def check(mockObj, callObj, idx):
        return cond(callObj.getParam(paramIdx))
    return check
def EQ(value):
    """Condition: parameter == value."""
    def check(param):
        return param == value
    return check

def NE(value):
    """Condition: parameter != value."""
    def check(param):
        return param != value
    return check

def GT(value):
    """Condition: parameter > value."""
    def check(param):
        return param > value
    return check

def LT(value):
    """Condition: parameter < value."""
    def check(param):
        return param < value
    return check

def GE(value):
    """Condition: parameter >= value."""
    def check(param):
        return param >= value
    return check

def LE(value):
    """Condition: parameter <= value."""
    def check(param):
        return param <= value
    return check
def AND(*condlist):
    """Condition: every condition in *condlist* holds for the parameter."""
    def check(param):
        return all(cond(param) for cond in condlist)
    return check

def OR(*condlist):
    """Condition: at least one condition in *condlist* holds."""
    def check(param):
        return any(cond(param) for cond in condlist)
    return check

def NOT(cond):
    """Condition: *cond* does not hold for the parameter."""
    def check(param):
        return not cond(param)
    return check
def MATCHES(regex, *args, **kwargs):
    """Condition: parameter matches *regex* (anchored at the start).

    The pattern is compiled once when the condition is built; extra
    args/kwargs are passed through to re.compile (e.g. flags).
    """
    compiled_regex = re.compile(regex, *args, **kwargs)
    def check(param):
        # 'is not None' is the correct way to test a match result;
        # match objects must not be compared to None with != / ==.
        return compiled_regex.match(param) is not None
    return check
def SEQ(*sequence):
    """Condition: successive calls test successive conditions.

    Each invocation consumes the next condition from *sequence*; calling
    it after the sequence is exhausted raises AssertionError.
    """
    iterator = iter(sequence)
    def check(param):
        try:
            # The builtin next() works on Python 2.6+ and 3;
            # iterator.next() was Python-2 only.
            cond = next(iterator)
        except StopIteration:
            raise AssertionError('SEQ exhausted')
        return cond(param)
    return check
def IS(instance):
    """Condition: parameter is exactly *instance* (identity test)."""
    def check(param):
        return param is instance
    return check

def ISINSTANCE(class_):
    """Condition: parameter is an instance of *class_*."""
    def check(param):
        return isinstance(param, class_)
    return check

def ISSUBCLASS(class_):
    """Condition: parameter is a subclass of *class_*."""
    def check(param):
        return issubclass(param, class_)
    return check

def CONTAINS(val):
    """Condition: *val* is contained in the parameter."""
    def check(param):
        return val in param
    return check

def IN(container):
    """Condition: the parameter is contained in *container*."""
    def check(param):
        return param in container
    return check

def HASATTR(attr):
    """Condition: the parameter has an attribute named *attr*."""
    def check(param):
        return hasattr(param, attr)
    return check

def HASMETHOD(method):
    """Condition: the parameter has a callable attribute *method*."""
    def check(param):
        return hasattr(param, method) and callable(getattr(param, method))
    return check

# A condition is just a one-argument predicate, so the builtin fits as-is.
CALLABLE = callable
|
cropleyb/pentai
|
pentai/base/mock.py
|
Python
|
mit
| 16,157
|
# encoding: utf-8
"""
Paging capabilities for IPython.core
Authors:
* Brian Granger
* Fernando Perez
Notes
-----
For now this uses ipapi, so it can't be in IPython.utils. If we can get
rid of that dependency, we could move it there.
-----
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
import os
import re
import sys
import tempfile
from io import UnsupportedOperation
from IPython import get_ipython
from IPython.core.error import TryNext
from IPython.utils.data import chop
from IPython.utils import io
from IPython.utils.process import system
from IPython.utils.terminal import get_terminal_size
from IPython.utils import py3compat
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
esc_re = re.compile(r"(\x1b[^m]+m)")
def page_dumb(strng, start=0, screen_lines=25):
    """Very dumb 'pager' in Python, for when nothing else works.

    Only moves forward, same interface as page(), except for pager_cmd and
    mode."""
    out_ln = strng.splitlines()[start:]
    # Break the text into screenfuls of (screen_lines - 1) lines each.
    screens = chop(out_ln, screen_lines - 1)
    if len(screens) == 1:
        print(os.linesep.join(screens[0]), file=io.stdout)
    else:
        last_escape = ""
        for scr in screens[0:-1]:
            hunk = os.linesep.join(scr)
            print(last_escape + hunk, file=io.stdout)
            if not page_more():
                return
            # Carry the last ANSI escape sequence of this screenful over to
            # the next one, so colouring survives the page break.
            esc_list = esc_re.findall(hunk)
            if len(esc_list) > 0:
                last_escape = esc_list[-1]
        print(last_escape + os.linesep.join(screens[-1]), file=io.stdout)
def _detect_screen_size(screen_lines_def):
    """Attempt to work out the number of lines on the screen.

    This is called by page(). It can raise an error (e.g. when run in the
    test suite), so it's separated out so it can easily be called in a try
    block.

    screen_lines_def is the fallback value returned whenever detection is
    impossible (non-xterm terminal, missing curses/termios, Solaris).
    """
    TERM = os.environ.get('TERM', None)
    if not((TERM == 'xterm' or TERM == 'xterm-color') and sys.platform != 'sunos5'):
        # curses causes problems on many terminals other than xterm, and
        # some termios calls lock up on Sun OS5.
        return screen_lines_def
    try:
        import termios
        import curses
    except ImportError:
        return screen_lines_def
    # There is a bug in curses, where *sometimes* it fails to properly
    # initialize, and then after the endwin() call is made, the
    # terminal is left in an unusable state. Rather than trying to
    # check everytime for this (by requesting and comparing termios
    # flags each time), we just save the initial terminal state and
    # unconditionally reset it every time. It's cheaper than making
    # the checks.
    term_flags = termios.tcgetattr(sys.stdout)
    # Curses modifies the stdout buffer size by default, which messes
    # up Python's normal stdout buffering. This would manifest itself
    # to IPython users as delayed printing on stdout after having used
    # the pager.
    #
    # We can prevent this by manually setting the NCURSES_NO_SETBUF
    # environment variable. For more details, see:
    # http://bugs.python.org/issue10144
    NCURSES_NO_SETBUF = os.environ.get('NCURSES_NO_SETBUF', None)
    os.environ['NCURSES_NO_SETBUF'] = ''
    # Proceed with curses initialization
    try:
        scr = curses.initscr()
    except AttributeError:
        # Curses on Solaris may not be complete, so we can't use it there
        return screen_lines_def
    screen_lines_real, screen_cols = scr.getmaxyx()
    curses.endwin()
    # Restore environment
    if NCURSES_NO_SETBUF is None:
        del os.environ['NCURSES_NO_SETBUF']
    else:
        os.environ['NCURSES_NO_SETBUF'] = NCURSES_NO_SETBUF
    # Restore terminal state in case endwin() didn't.
    termios.tcsetattr(sys.stdout, termios.TCSANOW, term_flags)
    # Now we have what we needed: the screen size in rows/columns
    return screen_lines_real
    # print '***Screen size:',screen_lines_real,'lines x',\
    # screen_cols,'columns.' # dbg
def page(strng, start=0, screen_lines=0, pager_cmd=None):
    """Display a string, piping through a pager after a certain length.

    strng can be a mime-bundle dict, supplying multiple representations,
    keyed by mime-type.

    The screen_lines parameter specifies the number of *usable* lines of your
    terminal screen (total lines minus lines you need to reserve to show other
    information).

    If you set screen_lines to a number <=0, page() will try to auto-determine
    your screen size and will only use up to (screen_size+screen_lines) for
    printing, paging after that. That is, if you want auto-detection but need
    to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
    auto-detection without any lines reserved simply use screen_lines = 0.

    If a string won't fit in the allowed lines, it is sent through the
    specified pager command. If none given, look for PAGER in the environment,
    and ultimately default to less.

    If no system pager works, the string is sent through a 'dumb pager'
    written in python, very simplistic.
    """
    # for compatibility with mime-bundle form:
    if isinstance(strng, dict):
        strng = strng['text/plain']
    # Some routines may auto-compute start offsets incorrectly and pass a
    # negative value. Offset to 0 for robustness.
    start = max(0, start)
    # first, try the hook
    ip = get_ipython()
    if ip:
        try:
            ip.hooks.show_in_pager(strng)
            return
        except TryNext:
            pass
    # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
    TERM = os.environ.get('TERM', 'dumb')
    if TERM in ['dumb', 'emacs'] and os.name != 'nt':
        print(strng)
        return
    # chop off the topmost part of the string we don't want to see
    str_lines = strng.splitlines()[start:]
    str_toprint = os.linesep.join(str_lines)
    num_newlines = len(str_lines)
    len_str = len(str_toprint)
    # Dumb heuristics to guesstimate number of on-screen lines the string
    # takes. Very basic, but good enough for docstrings in reasonable
    # terminals. If someone later feels like refining it, it's not hard.
    numlines = max(num_newlines, int(len_str / 80) + 1)
    screen_lines_def = get_terminal_size()[1]
    # auto-determine screen size
    if screen_lines <= 0:
        try:
            # screen_lines may be negative (lines to reserve), so add.
            screen_lines += _detect_screen_size(screen_lines_def)
        except (TypeError, UnsupportedOperation):
            # Detection impossible (e.g. stdout is not a real tty).
            print(str_toprint, file=io.stdout)
            return
    # print 'numlines',numlines,'screenlines',screen_lines  # dbg
    if numlines <= screen_lines:
        # print '*** normal print'  # dbg
        print(str_toprint, file=io.stdout)
    else:
        # Try to open pager and default to internal one if that fails.
        # All failure modes are tagged as 'retval=1', to match the return
        # value of a failed system command. If any intermediate attempt
        # sets retval to 1, at the end we resort to our own page_dumb() pager.
        pager_cmd = get_pager_cmd(pager_cmd)
        pager_cmd += ' ' + get_pager_start(pager_cmd, start)
        if os.name == 'nt':
            if pager_cmd.startswith('type'):
                # The default WinXP 'type' command is failing on complex
                # strings.
                retval = 1
            else:
                fd, tmpname = tempfile.mkstemp('.txt')
                try:
                    os.close(fd)
                    with open(tmpname, 'wt') as tmpfile:
                        tmpfile.write(strng)
                    cmd = "%s < %s" % (pager_cmd, tmpname)
                    # tmpfile needs to be closed for windows
                    if os.system(cmd):
                        retval = 1
                    else:
                        retval = None
                finally:
                    os.remove(tmpname)
        else:
            try:
                retval = None
                # if I use popen4, things hang. No idea why.
                #pager,shell_out = os.popen4(pager_cmd)
                pager = os.popen(pager_cmd, 'w')
                try:
                    pager_encoding = pager.encoding or sys.stdout.encoding
                    pager.write(py3compat.cast_bytes_py2(
                        strng, encoding=pager_encoding))
                finally:
                    retval = pager.close()
            except IOError as msg:  # broken pipe when user quits
                if msg.args == (32, 'Broken pipe'):
                    retval = None
                else:
                    retval = 1
            except OSError:
                # Other strange problems, sometimes seen in Win2k/cygwin
                retval = 1
        if retval is not None:
            page_dumb(strng, screen_lines=screen_lines)
def page_file(fname, start=0, pager_cmd=None):
    """Page a file, using an optional pager command and starting line.

    Falls back on the internal page() function when the system pager
    cannot be used (dumb/emacs terminals, missing TERM, failed command).
    """
    pager_cmd = get_pager_cmd(pager_cmd)
    pager_cmd += ' ' + get_pager_start(pager_cmd, start)
    try:
        if os.environ['TERM'] in ['emacs', 'dumb']:
            raise EnvironmentError
        system(pager_cmd + ' ' + fname)
    except Exception:
        # 'except Exception' (instead of a bare except) still covers the
        # deliberate EnvironmentError above and a KeyError from a missing
        # TERM, but no longer swallows KeyboardInterrupt/SystemExit.
        try:
            if start > 0:
                start -= 1
            # Use a context manager so the file handle is not leaked.
            with open(fname) as f:
                page(f.read(), start)
        except Exception:
            print('Unable to show file', repr(fname))
def get_pager_cmd(pager_cmd=None):
    """Return a pager command.

    Makes some attempts at finding an OS-correct one; honours the PAGER
    environment variable when no explicit command is given.
    """
    if pager_cmd is not None:
        return pager_cmd
    if os.name == 'posix':
        default_pager_cmd = 'less -r'  # -r for color control sequences
    elif os.name in ['nt', 'dos']:
        default_pager_cmd = 'type'
    else:
        # Previously this branch left default_pager_cmd unbound and raised
        # NameError on exotic platforms; fall back to less instead.
        default_pager_cmd = 'less -r'
    # .get() replaces the bare except around os.environ['PAGER'].
    return os.environ.get('PAGER', default_pager_cmd)
def get_pager_start(pager, start):
    """Return the string for paging files with an offset.

    This is the '+N' argument which less and more (under Unix) accept.
    """
    if pager in ('less', 'more') and start:
        return '+' + str(start)
    return ''
# (X)emacs on win32 doesn't like to be bypassed with msvcrt.getch()
if os.name == 'nt' and os.environ.get('TERM', 'dumb') != 'emacs':
    import msvcrt

    def page_more():
        """ Smart pausing between pages

        @return: True if need print more lines, False if quit
        """
        io.stdout.write('---Return to continue, q to quit--- ')
        ans = msvcrt.getwch()
        if ans in ("q", "Q"):
            result = False
        else:
            result = True
        # Erase the prompt: backspaces, overwrite with spaces, backspaces.
        io.stdout.write("\b" * 37 + " " * 37 + "\b" * 37)
        return result
else:
    def page_more():
        """Portable fallback: ask on stdin whether to continue paging."""
        ans = py3compat.input('---Return to continue, q to quit--- ')
        if ans.lower().startswith('q'):
            return False
        else:
            return True
def snip_print(str, width=75, print_full=0, header=''):
    """Print a string snipping the midsection to fit in width.

    print_full: mode control:
      - 0: only snip long strings
      - 1: send to page() directly.
      - 2: snip long strings and ask for full length viewing with page()

    Return 1 if snipping was necessary, 0 otherwise."""
    # NOTE(review): the parameter name 'str' shadows the builtin; renaming
    # it would break keyword callers, so it is left as-is.
    if print_full == 1:
        page(header + str)
        return 0
    print(header, end=' ')
    if len(str) < width:
        print(str)
        snip = 0
    else:
        # Keep the head and tail halves, drop the middle.
        whalf = int((width - 5) / 2)
        print(str[:whalf] + ' <...> ' + str[-whalf:])
        snip = 1
    if snip and print_full == 2:
        if py3compat.input(header + ' Snipped. View (y/n)? [N]').lower() == 'y':
            page(str)
    return snip
|
mattvonrocketstein/smash
|
smashlib/ipy3x/core/page.py
|
Python
|
mit
| 12,350
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.exceptions import ImproperlyConfigured
import os

# Directory the frozen static site is written to; must be an absolute path.
FREEZE_ROOT = getattr(settings, 'FREEZE_ROOT', os.path.abspath(os.path.join(settings.MEDIA_ROOT, '../freeze/')) )
if not os.path.isabs(FREEZE_ROOT):
    raise ImproperlyConfigured('settings.FREEZE_ROOT should be an absolute path')
# NOTE(review): str.find(...) == 0 tests that MEDIA_ROOT/STATIC_ROOT *start
# with* FREEZE_ROOT (i.e. lie inside it), which is the reverse of the
# containment described in the error message -- confirm which is intended.
if settings.MEDIA_ROOT.find(FREEZE_ROOT) == 0 or settings.STATIC_ROOT.find(FREEZE_ROOT) == 0:
    raise ImproperlyConfigured('settings.FREEZE_ROOT cannot be a subdirectory of MEDIA_ROOT or STATIC_ROOT')

# Mirror Django's media/static settings for the freeze process.
FREEZE_MEDIA_ROOT = settings.MEDIA_ROOT
FREEZE_MEDIA_URL = settings.MEDIA_URL
FREEZE_STATIC_ROOT = settings.STATIC_ROOT
FREEZE_STATIC_URL = settings.STATIC_URL

# Protocol and site URL used when crawling the site.
FREEZE_USE_HTTPS = getattr(settings, 'FREEZE_USE_HTTPS', False)
FREEZE_PROTOCOL = 'https://' if FREEZE_USE_HTTPS else 'http://'
FREEZE_SITE_URL = getattr(settings, 'FREEZE_SITE_URL', None)
if(FREEZE_SITE_URL == None):
    # handled this way to remove DB dependency unless strictly needed. If FREEZE_SITE_URL is set then collectstatic
    # can be called without needing a db setup, which is useful for build servers
    FREEZE_SITE_URL = '%s%s' % (FREEZE_PROTOCOL, Site.objects.get_current().domain,)

# Optional base URL prefix for generated links; must start with '/' or 'http',
# and is normalised to end with '/'.
FREEZE_BASE_URL = getattr(settings, 'FREEZE_BASE_URL', None)
if FREEZE_BASE_URL:
    if FREEZE_BASE_URL.startswith('/') or FREEZE_BASE_URL.startswith('http'):
        if not FREEZE_BASE_URL.endswith('/'):
            FREEZE_BASE_URL += '/'
    else:
        raise ImproperlyConfigured('settings.FREEZE_BASE_URL should start with \'/\' or \'http\' or be an empty string')

# URL-rewriting modes; RELATIVE_URLS excludes BASE_URL, and LOCAL_URLS
# requires RELATIVE_URLS.
FREEZE_RELATIVE_URLS = getattr(settings, 'FREEZE_RELATIVE_URLS', False)
if FREEZE_RELATIVE_URLS and FREEZE_BASE_URL != None:
    raise ImproperlyConfigured('settings.FREEZE_RELATIVE_URLS cannot be set to True if FREEZE_BASE_URL is specified')
FREEZE_LOCAL_URLS = getattr(settings, 'FREEZE_LOCAL_URLS', False)
if FREEZE_LOCAL_URLS and not FREEZE_RELATIVE_URLS:
    raise ImproperlyConfigured('settings.FREEZE_LOCAL_URLS cannot be set to True if FREEZE_RELATIVE_URLS is set to False')

# Crawl behaviour and invalid-URL reporting.
FREEZE_FOLLOW_SITEMAP_URLS = getattr(settings, 'FREEZE_FOLLOW_SITEMAP_URLS', True)
FREEZE_FOLLOW_HTML_URLS = getattr(settings, 'FREEZE_FOLLOW_HTML_URLS', True)
FREEZE_REPORT_INVALID_URLS = getattr(settings, 'FREEZE_REPORT_INVALID_URLS', False)
FREEZE_REPORT_INVALID_URLS_SUBJECT = getattr(settings, 'FREEZE_REPORT_INVALID_URLS_SUBJECT', '[freeze] invalid urls')

# What to bundle into the frozen output, and optional zip packaging.
FREEZE_INCLUDE_MEDIA = getattr(settings, 'FREEZE_INCLUDE_MEDIA', True)
FREEZE_INCLUDE_STATIC = getattr(settings, 'FREEZE_INCLUDE_STATIC', True)
FREEZE_ZIP_ALL = getattr(settings, 'FREEZE_ZIP_ALL', False)
FREEZE_ZIP_NAME = getattr(settings, 'FREEZE_ZIP_NAME', 'freeze')
# Ensure a '.zip' suffix (note: only applied when the name is >= 4 chars).
if len(FREEZE_ZIP_NAME) >= 4 and FREEZE_ZIP_NAME[-4:].lower() != '.zip':
    FREEZE_ZIP_NAME += '.zip'
FREEZE_ZIP_PATH = os.path.abspath(os.path.join(FREEZE_ROOT, FREEZE_ZIP_NAME))

# Extra HTTP headers sent while crawling the site.
FREEZE_REQUEST_HEADERS = getattr(settings, 'FREEZE_REQUEST_HEADERS', {'user-agent': 'django-freeze'})
|
fabiocaccamo/django-freeze
|
freeze/settings.py
|
Python
|
mit
| 3,077
|
from flask_restful import marshal_with, reqparse
from flask import abort, g
from flask import Blueprint as FlaskBlueprint
from flask_restful import fields
from sqlalchemy.orm.session import make_transient
import logging
import uuid
from pebbles.models import db, BlueprintTemplate, Plugin
from pebbles.forms import BlueprintTemplateForm
from pebbles.server import restful
from pebbles.views.commons import auth, requires_group_manager_or_admin
from pebbles.utils import requires_admin, parse_maximum_lifetime
from pebbles.rules import apply_rules_blueprint_templates
blueprint_templates = FlaskBlueprint('blueprint_templates', __name__)

# flask-restful marshalling schema describing how blueprint templates are
# serialised in API responses.
blueprint_template_fields = {
    'id': fields.String(attribute='id'),
    'name': fields.String,
    'is_enabled': fields.Boolean,
    'plugin': fields.String,
    'config': fields.Raw,
    'schema': fields.Raw,
    'form': fields.Raw,
    'allowed_attrs': fields.Raw,
    'blueprint_schema': fields.Raw,
    'blueprint_form': fields.Raw,
    'blueprint_model': fields.Raw
}
class BlueprintTemplateList(restful.Resource):
    """REST collection endpoint for blueprint templates."""

    @auth.login_required
    @requires_group_manager_or_admin
    @marshal_with(blueprint_template_fields)
    def get(self):
        """List templates visible to the current user, ordered by name."""
        user = g.user
        query = apply_rules_blueprint_templates(user)
        query = query.order_by(BlueprintTemplate.name)
        results = []
        for blueprint_template in query.all():
            # Attach the owning plugin's schema/form for the UI.
            plugin = Plugin.query.filter_by(id=blueprint_template.plugin).first()
            blueprint_template.schema = plugin.schema
            blueprint_template.form = plugin.form
            # Due to immutable nature of config field, whole dict needs to be reassigned.
            # Issue #444 in github
            blueprint_template_config = blueprint_template.config
            blueprint_template_config['name'] = blueprint_template.name
            blueprint_template.config = blueprint_template_config
            results.append(blueprint_template)
        return results

    @auth.login_required
    @requires_admin
    def post(self):
        """Create a new blueprint template (admin only)."""
        form = BlueprintTemplateForm()
        if not form.validate_on_submit():
            logging.warn("validation error on create blueprint_template")
            return form.errors, 422
        blueprint_template = BlueprintTemplate()
        blueprint_template.name = form.name.data
        blueprint_template.plugin = form.plugin.data
        config = form.config.data
        # 'name' lives on the model, not inside the config blob.
        config.pop('name', None)
        blueprint_template.config = config
        try:
            validate_max_lifetime_template(config)  # Validate the maximum lifetime from config
        except ValueError:
            timeformat_error = {"timeformat error": "pattern should be [days]d [hours]h [minutes]m"}
            return timeformat_error, 422
        if isinstance(form.allowed_attrs.data, dict):  # WTForms can only fetch a dict
            blueprint_template.allowed_attrs = form.allowed_attrs.data['allowed_attrs']
            blueprint_template = blueprint_schemaform_config(blueprint_template)
        db.session.add(blueprint_template)
        db.session.commit()
class BlueprintTemplateView(restful.Resource):
    """REST endpoint for a single blueprint template."""

    parser = reqparse.RequestParser()
    parser.add_argument('disable_blueprints', type=bool)

    @auth.login_required
    @requires_group_manager_or_admin
    @marshal_with(blueprint_template_fields)
    def get(self, template_id):
        """Fetch one template visible to the current user, or 404."""
        args = {'template_id': template_id}
        query = apply_rules_blueprint_templates(g.user, args)
        blueprint_template = query.first()
        if not blueprint_template:
            abort(404)
        return blueprint_template

    @auth.login_required
    @requires_admin
    def put(self, template_id):
        """Update an existing template (admin only)."""
        form = BlueprintTemplateForm()
        if not form.validate_on_submit():
            logging.warn("validation error on update blueprint_template config")
            return form.errors, 422
        blueprint_template = BlueprintTemplate.query.filter_by(id=template_id).first()
        if not blueprint_template:
            abort(404)
        # Prefer the name embedded in the config payload, if any.
        blueprint_template.name = form.config.data.get('name') or form.name.data
        blueprint_template.plugin = form.plugin.data
        config = form.config.data
        config.pop('name', None)
        blueprint_template.config = config
        try:
            validate_max_lifetime_template(config)  # Validate the maximum lifetime from config
        except ValueError:
            timeformat_error = {"timeformat error": "pattern should be [days]d [hours]h [minutes]m"}
            return timeformat_error, 422
        if isinstance(form.allowed_attrs.data, dict):  # WTForms can only fetch a dict
            blueprint_template.allowed_attrs = form.allowed_attrs.data['allowed_attrs']
            blueprint_template = blueprint_schemaform_config(blueprint_template)
        args = self.parser.parse_args()
        blueprint_template = toggle_enable_template(form, args, blueprint_template)
        db.session.add(blueprint_template)
        db.session.commit()
class BlueprintTemplateCopy(restful.Resource):
    """REST endpoint that duplicates an existing blueprint template."""

    @auth.login_required
    @requires_admin
    def put(self, template_id):
        """Clone the template identified by template_id under a new id."""
        source = BlueprintTemplate.query.get_or_404(template_id)
        # Detach from the session so it can be re-inserted as a new row.
        db.session.expunge(source)
        make_transient(source)
        source.id = uuid.uuid4().hex
        source.name = "%s - %s" % (source.name, 'Copy')
        db.session.add(source)
        db.session.commit()
def toggle_enable_template(form, args, blueprint_template):
    """Logic for activating and deactivating a blueprint template"""
    raw = form.is_enabled.raw_data
    # WTForms Issue#451: read the raw submitted value when one is present.
    blueprint_template.is_enabled = raw[0] if raw else False
    if args.get('disable_blueprints'):
        # Disable all associated blueprints as well.
        for blueprint in blueprint_template.blueprints:
            blueprint.is_enabled = False
    return blueprint_template
def blueprint_schemaform_config(blueprint_template):
    """Generates config,schema and model objects used in schemaform ui component for blueprints"""
    plugin = Plugin.query.filter_by(id=blueprint_template.plugin).first()
    schema = plugin.schema
    blueprint_schema = {'type': 'object', 'title': 'Comment', 'description': 'Description', 'required': ['name', 'description'], 'properties': {}}
    config = blueprint_template.config
    blueprint_model = {}
    allowed_attrs = blueprint_template.allowed_attrs
    blueprint_form = allowed_attrs
    # 'name' and 'description' are always exposed in the blueprint form.
    allowed_attrs = ['name', 'description'] + allowed_attrs
    for attr in allowed_attrs:
        # Copy the plugin's schema entry for each exposed attribute.
        blueprint_schema['properties'][attr] = schema['properties'][attr]
        if attr in ('name', 'description'):
            blueprint_model[attr] = ''
        else:
            # Pre-fill the model with the template's configured value.
            blueprint_model[attr] = config[attr]
    # Fixed form entries for name/description, followed by the allowed attrs.
    blueprint_form = [
        {
            "key": "name",
            "type": "textfield",
            "placeholder": "Blueprint name"
        },
        {
            "key": "description",
            "type": "textarea",
            "placeholder": "Blueprint details"
        }
    ] + blueprint_form
    blueprint_template.blueprint_schema = blueprint_schema
    blueprint_template.blueprint_form = blueprint_form
    blueprint_template.blueprint_model = blueprint_model
    return blueprint_template
def validate_max_lifetime_template(config):
    """Checks if the maximum lifetime has a valid pattern"""
    if 'maximum_lifetime' not in config:
        return
    lifetime = str(config['maximum_lifetime'])
    if lifetime:
        parse_maximum_lifetime(lifetime)
|
CSC-IT-Center-for-Science/pouta-blueprints
|
pebbles/views/blueprint_templates.py
|
Python
|
mit
| 7,676
|
from ..workdays import *
from datetime import datetime, timedelta
from time import strptime
import math
import traceback
# Registry of test functions, populated by the @test decorator below.
tests=[]

def test( fn ):
    """Decorator: register *fn* so runTests() will execute it."""
    tests.append(fn)
    return fn
def runTests():
    """Run every registered test, reporting exceptions without aborting."""
    for t in tests:
        print t
        try: t()
        except Exception as e:
            print e
            traceback.print_exc()
        print
def _parse_date( datestr ):
    """Parse a 'YYYY-MM-DD' string into a datetime.date."""
    fields = strptime(datestr, "%Y-%m-%d")[0:5]
    return datetime(*fields).date()
def _parse_datetime( datestr ):
    """Coerce *datestr* into a datetime.

    Accepts a 'YYYY-MM-DD HH:MM' string, a (y, m, d, ...) tuple, or a
    datetime (returned unchanged); anything else yields None.
    """
    # isinstance() replaces the type(x) == type(...) anti-pattern, and
    # using the directly-imported datetime drops the reliance on the
    # star-imported 'dt' alias.
    if isinstance(datestr, str):
        return datetime(*strptime(datestr, "%Y-%m-%d %H:%M")[0:5])
    if isinstance(datestr, tuple):
        return datetime(*datestr)
    if isinstance(datestr, datetime):
        return datestr
    return None
def _is_same_dt( d1, d2, numParts=5 ):
    """True when the first numParts timetuple fields of d1 and d2 agree."""
    left = d1.timetuple()[:numParts]
    right = d2.timetuple()[:numParts]
    return left == right
@test
def shouldEstimateEnd():
    """estimate_end: plain calendar-day projection of remaining work."""
    def test( d1, d2, total, remaining, dexp ):
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_date(dexp)
        dres = estimate_end( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
    # Monday 2017-03-06
    test( "2017-03-06 00:00", "2017-03-07 00:00", 2, 1, "2017-03-08" )
    test( "2017-03-06 00:00", "2017-03-08 00:00", 2, 1, "2017-03-10" )
    test( "2017-03-06 00:00", "2017-03-09 00:00", 2, 1, "2017-03-12" )
    test( "2017-03-06 00:00", "2017-03-10 00:00", 2, 1, "2017-03-14" )
    test( "2017-03-06 00:00", "2017-03-13 00:00", 2, 1, "2017-03-20" )

@test
def shouldAdjustStart():
    """adjusted_start: weekend starts move to the following Monday."""
    def test( d1, dexp ):
        dexp = _parse_datetime(dexp)
        dres = adjusted_start( _parse_datetime( d1 ) )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 5 ) )
    # Monday 2017-03-06
    test( "2017-03-06 08:00", "2017-03-06 08:00" )
    test( "2017-03-07 08:00", "2017-03-07 08:00" )
    test( "2017-03-08 08:00", "2017-03-08 08:00" )
    test( "2017-03-09 08:00", "2017-03-09 08:00" )
    test( "2017-03-10 08:00", "2017-03-10 08:00" )
    test( "2017-03-11 08:00", "2017-03-13 00:00" )
    test( "2017-03-12 08:00", "2017-03-13 00:00" )

@test
def shouldAdjustEnd():
    """adjusted_end: weekend ends move back to the preceding Friday."""
    def test( d1, dexp ):
        dexp = _parse_datetime(dexp)
        dres = adjusted_end( _parse_datetime( d1 ) )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 5 ) )
    # Monday 2017-03-06
    test( "2017-03-06 08:00", "2017-03-06 08:00" )
    test( "2017-03-07 08:00", "2017-03-07 08:00" )
    test( "2017-03-08 08:00", "2017-03-08 08:00" )
    test( "2017-03-09 08:00", "2017-03-09 08:00" )
    test( "2017-03-10 08:00", "2017-03-10 08:00" )
    test( "2017-03-11 08:00", "2017-03-10 23:59" )
    test( "2017-03-12 08:00", "2017-03-10 23:59" )

@test
def shouldEstimateEndWorkdays():
    """estimate_end_workdays: projection counting working days only."""
    def test( d1, d2, total, remaining, dexp ):
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_datetime(dexp)
        dres = estimate_end_workdays( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
    # Monday 2017-03-06
    # same week
    test( "2017-03-06 08:00", "2017-03-07 08:00", 2, 1, "2017-03-08 08:00" )
    test( "2017-03-06 08:00", "2017-03-08 08:00", 2, 1, "2017-03-10 08:00" )
    # projection spans weekends
    test( "2017-03-06 08:00", "2017-03-09 08:00", 2, 1, "2017-03-14 08:00" )
    test( "2017-03-06 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    # a weekend is in the completed time, estimate falls on weekend
    # 06 07 08 09 10 w11 w12 13 14 15 16 17 w18 w19 20
    test( "2017-03-06 08:00", "2017-03-13 08:00", 2, 1, "2017-03-20 08:00" )
    # Start on weekend
    test( "2017-03-05 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    test( "2017-03-04 08:00", "2017-03-10 08:00", 2, 1, "2017-03-16 08:00" )
    # Start and now on weekend
    test( "2017-03-05 08:00", "2017-03-11 08:00", 2, 1, "2017-03-17 23:59" )
    test( "2017-03-04 08:00", "2017-03-12 08:00", 2, 1, "2017-03-17 23:59" )

@test
def shouldEstimateEndWorkdays2():
    """estimate_end_workdays over a growing amount of completed work."""
    def test( d1, d2, total, remaining, dexp ):
        d1 = _parse_datetime(d1)
        d2 = _parse_datetime(d2)
        dexp = _parse_datetime(dexp)
        dres = estimate_end_workdays( d1, d2, total, remaining )
        print "expected: %s, actual %s, %s" % (dexp, dres, _is_same_dt( dres, dexp, 3 ) )
        if not _is_same_dt( dres, dexp ):
            print " diff:", dres - dexp
    # Monday 2017-03-06
    d1 = dt.datetime(2017, 03, 06, 8 )
    d2 = dt.datetime(2017, 03, 13, 8 )
    for done in xrange(1, 22, 5):
        dexp = d2 + dt.timedelta( weeks=done )
        print done, dt.timedelta( weeks=done ),
        test( d1, d2, done+1, done, dexp )

# Execute all registered tests on import/run.
runTests()
|
mmahnic/trac-tickethistory
|
tickethistory/test/workdays_t.py
|
Python
|
mit
| 4,805
|
#! /usr/bin/env python
"""Team Password Manager API
To simplify usage of Team Password Manager API.
You can authenticate with username and password
>>> import tpm
>>> URL = "https://mypasswordmanager.example.com"
>>> USER = 'MyUser'
>>> PASS = 'Secret'
>>> tpmconn = tpm.TpmApiv5(URL, username=USER, password=PASS)
Or with Private/Public Key
>>> pubkey = '3726d93f2a0e5f0fe2cc3a6e9e3ade964b43b07f897d579466c28b7f8ff51cd0'
>>> privkey = '87324bedead51af96a45271d217b8ad5ef3f220da6c078a9bce4e4318729189c'
>>> tpmconn = tpm.TpmApiv5(URL, private_key=privkey, public_key=pubkey)
With the connection object you can use all TPM functions, like list all passwords:
>>> tpmconn.list_passwords()
All API functions from Team Password Manager are included.
see http://teampasswordmanager.com/docs/api/
:copyright: (c) 2021 by Andreas Hubert.
:license: The MIT License (MIT), see LICENSE for more details.
"""
__version__ = '4.1'
import hmac
import hashlib
import time
import requests
import re
import json
import logging
import base64
import os.path
from urllib.parse import quote_plus
# set logger
log = logging.getLogger(__name__)
# disable unsecure SSL warning
requests.packages.urllib3.disable_warnings()
class TPMException(Exception):
    """Generic error raised by the Team Password Manager API wrapper."""
    pass
class TpmApi(object):
"""Settings needed for the connection to Team Password Manager."""
    class ConfigError(Exception):
        """To throw Exception based on wrong Settings."""
        def __init__(self, value):
            self.value = value
            # Log at critical level so a misconfiguration is visible even
            # when the exception is caught upstream.
            log.critical(value)
        def __str__(self):
            return repr(self.value)
    def __init__(self, api, base_url, kwargs):
        """Initialise connection settings.

        api: API version path fragment (e.g. 'v5').
        base_url: base URL of the TPM installation; validated by regex.
        kwargs: authentication options -- either private_key/public_key or
        username/password, plus an optional unlock_reason.

        Raises ConfigError on an invalid URL or missing/ambiguous
        authentication settings.
        """
        # Check if API version is not bullshit
        REGEXurl = "^" \
            "(?:(?:https?)://)" \
            "(?:\\S+(?::\\S*)?@)?" \
            "(?:" \
            "(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])" \
            "(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}" \
            "(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))" \
            "|" \
            "(?:(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)" \
            "(?:\\.(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)*" \
            "(?:\\.(?:[a-z\\u00a1-\\uffff]{2,}))?" \
            ".?" \
            ")" \
            "(?::\\d{2,5})?" \
            "(?:[/?#]\\S*)?" \
            "$"
        self.apiurl = 'api/' + api + '/'
        log.debug('Set as apiurl: {}'.format(self.apiurl))
        self.api = self.apiurl
        # Check if URL is not bullshit
        if re.match(REGEXurl, base_url):
            self.base_url = base_url + '/index.php/'
            log.debug('Set Base URL to {}'.format(self.base_url))
            self.url = self.base_url + self.apiurl
            log.debug('Set URL to {}'.format(self.url))
        else:
            raise self.ConfigError('Invalid URL: {}'.format(base_url))
        # set headers
        self.headers = {'Content-Type': 'application/json; charset=utf-8',
                        'User-Agent': 'tpm.py/' + __version__
                        }
        log.debug('Set header to {}'.format(self.headers))
        # check kwargs for either keys or user credentials
        self.private_key = False
        self.public_key = False
        self.username = False
        self.password = False
        self.unlock_reason = False
        for key in kwargs:
            if key == 'private_key':
                self.private_key = kwargs[key]
            elif key == 'public_key':
                self.public_key = kwargs[key]
            elif key == 'username':
                self.username = kwargs[key]
            elif key == 'password':
                self.password = kwargs[key]
            elif key == 'unlock_reason':
                self.unlock_reason = kwargs[key]
        # Exactly one authentication scheme must be fully specified.
        if self.private_key is not False and self.public_key is not False and\
                self.username is False and self.password is False:
            log.debug('Using Private/Public Key authentication.')
        elif self.username is not False and self.password is not False and\
                self.private_key is False and self.public_key is False:
            log.debug('Using Basic authentication.')
        else:
            raise self.ConfigError('No authentication specified'
                                   ' (user/password or private/public key)')
def request(self, path, action, data=''):
"""To make a request to the API."""
# Check if the path includes URL or not.
head = self.base_url
if path.startswith(head):
path = path[len(head):]
path = quote_plus(path, safe='/')
if not path.startswith(self.api):
path = self.api + path
log.debug('Using path {}'.format(path))
# If we have data, convert to JSON
if data:
data = json.dumps(data)
log.debug('Data to sent: {}'.format(data))
# In case of key authentication
if self.private_key and self.public_key:
timestamp = str(int(time.time()))
log.debug('Using timestamp: {}'.format(timestamp))
unhashed = path + timestamp + str(data)
log.debug('Using message: {}'.format(unhashed))
self.hash = hmac.new(str.encode(self.private_key),
msg=unhashed.encode('utf-8'),
digestmod=hashlib.sha256).hexdigest()
log.debug('Authenticating with hash: {}'.format(self.hash))
self.headers['X-Public-Key'] = self.public_key
self.headers['X-Request-Hash'] = self.hash
self.headers['X-Request-Timestamp'] = timestamp
auth = False
# In case of user credentials authentication
elif self.username and self.password:
auth = requests.auth.HTTPBasicAuth(self.username, self.password)
# Set unlock reason
if self.unlock_reason:
self.headers['X-Unlock-Reason'] = self.unlock_reason
log.info('Unlock Reason: {}'.format(self.unlock_reason))
url = head + path
# Try API request and handle Exceptions
try:
if action == 'get':
log.debug('GET request {}'.format(url))
self.req = requests.get(url, headers=self.headers, auth=auth,
verify=False)
elif action == 'post':
log.debug('POST request {}'.format(url))
self.req = requests.post(url, headers=self.headers, auth=auth,
verify=False, data=data)
elif action == 'put':
log.debug('PUT request {}'.format(url))
self.req = requests.put(url, headers=self.headers,
auth=auth, verify=False,
data=data)
elif action == 'delete':
log.debug('DELETE request {}'.format(url))
self.req = requests.delete(url, headers=self.headers,
verify=False, auth=auth)
if self.req.content == b'':
result = None
log.debug('No result returned.')
else:
result = self.req.json()
if 'error' in result and result['error']:
raise TPMException(result['message'])
except requests.exceptions.RequestException as e:
log.critical("Connection error for " + str(e))
raise TPMException("Connection error for " + str(e))
except ValueError as e:
if self.req.status_code == 403:
log.warning(url + " forbidden")
raise TPMException(url + " forbidden")
elif self.req.status_code == 404:
log.warning(url + " forbidden")
raise TPMException(url + " not found")
else:
message = ('{}: {} {}'.format(e, self.req.url, self.req.text))
log.debug(message)
raise ValueError(message)
return result
def post(self, path, data=''):
"""For post based requests."""
return self.request(path, 'post', data)
def get(self, path):
"""For get based requests."""
return self.request(path, 'get')
def put(self, path, data=''):
"""For put based requests."""
return self.request(path, 'put', data)
def delete(self, path):
"""For delete based requests."""
self.request(path, 'delete')
def get_collection(self, path):
"""To get pagewise data."""
while True:
items = self.get(path)
req = self.req
for item in items:
yield item
if req.links and req.links['next'] and\
req.links['next']['rel'] == 'next':
path = req.links['next']['url']
else:
break
def collection(self, path):
"""To return all items generated by get collection."""
data = []
for item in self.get_collection(path):
data.append(item)
return data
    # From now on, Functions that work that way in all API Versions.
    # -- Project endpoints ------------------------------------------------
    # http://teampasswordmanager.com/docs/api-projects/#list_projects
    def list_projects(self):
        """List projects."""
        log.debug('List all projects.')
        return self.collection('projects.json')
    def list_projects_archived(self):
        """List archived projects."""
        log.debug('List all archived projects.')
        return self.collection('projects/archived.json')
    def list_projects_favorite(self):
        """List favorite projects."""
        log.debug('List all favorite projects.')
        return self.collection('projects/favorite.json')
    def list_projects_search(self, searchstring):
        """List projects matching searchstring."""
        log.debug('List all projects with: {}'.format(searchstring))
        return self.collection('projects/search/{}.json'.format(quote_plus(searchstring)))
    def show_project(self, ID):
        """Show a project."""
        # http://teampasswordmanager.com/docs/api-projects/#show_project
        log.debug('Show project info: {}'.format(ID))
        return self.get('projects/{}.json'.format(ID))
    def list_passwords_of_project(self, ID):
        """List passwords of project."""
        # http://teampasswordmanager.com/docs/api-projects/#list_pwds_prj
        log.debug('List passwords of project: {}'.format(ID))
        return self.collection('projects/{}/passwords.json'.format(ID))
    def list_user_access_on_project(self, ID):
        """List users who can access a project."""
        # http://teampasswordmanager.com/docs/api-projects/#list_users_prj
        log.debug('List User access on project: {}'.format(ID))
        return self.collection('projects/{}/security.json'.format(ID))
    def create_project(self, data):
        """Create a project. Returns the new project's ID."""
        # http://teampasswordmanager.com/docs/api-projects/#create_project
        log.info('Create project: {}'.format(data))
        NewID = self.post('projects.json', data).get('id')
        log.info('Project has been created with ID {}'.format(NewID))
        return NewID
    def update_project(self, ID, data):
        """Update a project."""
        # http://teampasswordmanager.com/docs/api-projects/#update_project
        log.info('Update project {} with {}'.format(ID, data))
        self.put('projects/{}.json'.format(ID), data)
def change_parent_of_project(self, ID, NewParentID):
"""Change parent of project."""
# http://teampasswordmanager.com/docs/api-projects/#change_parent
log.info('Change parrent for project {} to {}'.format(ID, NewParentID))
data = {'parent_id': NewParentID}
self.put('projects/{}/change_parent.json'.format(ID), data)
    def update_security_of_project(self, ID, data):
        """Update security of project."""
        # http://teampasswordmanager.com/docs/api-projects/#update_project_security
        log.info('Update project {} security {}'.format(ID, data))
        self.put('projects/{}/security.json'.format(ID), data)
    def archive_project(self, ID):
        """Archive a project."""
        # http://teampasswordmanager.com/docs/api-projects/#arch_unarch_project
        log.info('Archive project {}'.format(ID))
        self.put('projects/{}/archive.json'.format(ID))
    def unarchive_project(self, ID):
        """Un-archive a project."""
        # http://teampasswordmanager.com/docs/api-projects/#arch_unarch_project
        log.info('Unarchive project {}'.format(ID))
        self.put('projects/{}/unarchive.json'.format(ID))
    def delete_project(self, ID):
        """Delete a project."""
        # http://teampasswordmanager.com/docs/api-projects/#delete_project
        log.info('Delete project {}'.format(ID))
        self.delete('projects/{}.json'.format(ID))
    # -- Password endpoints -----------------------------------------------
    # http://teampasswordmanager.com/docs/api-passwords/#list_passwords
    def list_passwords(self):
        """List passwords."""
        log.debug('List all passwords.')
        return self.collection('passwords.json')
    def list_passwords_archived(self):
        """List archived passwords."""
        log.debug('List archived passwords.')
        return self.collection('passwords/archived.json')
def list_passwords_favorite(self):
"""List favorite passwords."""
log.debug('List favorite spasswords.')
return self.collection('passwords/favorite.json')
    def list_passwords_search(self, searchstring):
        """List passwords matching searchstring."""
        log.debug('List all passwords with: {}'.format(searchstring))
        return self.collection('passwords/search/{}.json'.format(quote_plus(searchstring)))
    def show_password(self, ID):
        """Show password."""
        # http://teampasswordmanager.com/docs/api-passwords/#show_password
        log.info('Show password info: {}'.format(ID))
        return self.get('passwords/{}.json'.format(ID))
    def list_user_access_on_password(self, ID):
        """List users who can access a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#list_users_pwd
        log.debug('List user access on password {}'.format(ID))
        return self.collection('passwords/{}/security.json'.format(ID))
    def create_password(self, data):
        """Create a password. Returns the new password's ID."""
        # http://teampasswordmanager.com/docs/api-passwords/#create_password
        log.info('Create new password {}'.format(data))
        NewID = self.post('passwords.json', data).get('id')
        log.info('Password has been created with ID {}'.format(NewID))
        return NewID
    def update_password(self, ID, data):
        """Update a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#update_password
        log.info('Update Password {} with {}'.format(ID, data))
        self.put('passwords/{}.json'.format(ID), data)
    def update_security_of_password(self, ID, data):
        """Update security of a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#update_security_password
        log.info('Update security of password {} with {}'.format(ID, data))
        self.put('passwords/{}/security.json'.format(ID), data)
    def update_custom_fields_of_password(self, ID, data):
        """Update custom fields definitions of a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#update_cf_password
        log.info('Update custom fields of password {} with {}'.format(ID, data))
        self.put('passwords/{}/custom_fields.json'.format(ID), data)
    def delete_password(self, ID):
        """Delete a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#delete_password
        log.info('Delete password {}'.format(ID))
        self.delete('passwords/{}.json'.format(ID))
    def lock_password(self, ID):
        """Lock a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#lock_password
        log.info('Lock password {}'.format(ID))
        self.put('passwords/{}/lock.json'.format(ID))
    def unlock_password(self, ID, reason):
        """Unlock a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#unlock_password
        log.info('Unlock password {}, Reason: {}'.format(ID, reason))
        # Stores the reason so the X-Unlock-Reason header is sent.
        self.unlock_reason = reason
        self.put('passwords/{}/unlock.json'.format(ID))
    # -- "My passwords" endpoints -----------------------------------------
    def list_mypasswords(self):
        """List my passwords."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#list_passwords
        log.debug('List MyPasswords')
        return self.collection('my_passwords.json')
    def list_mypasswords_search(self, searchstring):
        """List my passwords matching searchstring."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#list_passwords
        log.debug('List MyPasswords with {}'.format(searchstring))
        return self.collection('my_passwords/search/{}.json'.format(quote_plus(searchstring)))
    def show_mypassword(self, ID):
        """Show my password."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#show_password
        log.debug('Show MyPassword {}'.format(ID))
        return self.get('my_passwords/{}.json'.format(ID))
    def create_mypassword(self, data):
        """Create my password. Returns the new password's ID."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#create_password
        log.info('Create MyPassword with {}'.format(data))
        NewID = self.post('my_passwords.json', data).get('id')
        log.info('MyPassword has been created with {}'.format(NewID))
        return NewID
    def update_mypassword(self, ID, data):
        """Update my password."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#update_password
        log.info('Update MyPassword {} with {}'.format(ID, data))
        self.put('my_passwords/{}.json'.format(ID), data)
    def delete_mypassword(self, ID):
        """Delete my password."""
        # http://teampasswordmanager.com/docs/api-my-passwords/#delete_password
        log.info('Delete password {}'.format(ID))
        self.delete('my_passwords/{}.json'.format(ID))
def set_favorite_password(self, ID):
"""Set a password as favorite."""
# http://teampasswordmanager.com/docs/api-favorites/#set_fav
log.info('Set password {} as favorite'.format(ID))
self.post('favorite_passwords/{}.json'.format(ID))
def unset_favorite_password(self, ID):
"""Unet a password as favorite."""
# http://teampasswordmanager.com/docs/api-favorites/#del_fav
log.info('Unset password {} as favorite'.format(ID))
self.delete('favorite_passwords/{}.json'.format(ID))
def set_favorite_project(self, ID):
"""Set a project as favorite."""
# http://teampasswordmanager.com/docs/api-favorites/#set_fav
log.info('Set project {} as favorite'.format(ID))
self.post('favorite_project/{}.json'.format(ID))
def unset_favorite_project(self, ID):
"""Unet a project as favorite."""
# http://teampasswordmanager.com/docs/api-favorites/#del_fav
log.info('Unset project {} as favorite'.format(ID))
self.delete('favorite_project/{}.json'.format(ID))
    # -- User endpoints ---------------------------------------------------
    def list_users(self):
        """List users."""
        # http://teampasswordmanager.com/docs/api-users/#list_users
        log.debug('List users')
        return self.collection('users.json')
    def show_user(self, ID):
        """Show a user."""
        # http://teampasswordmanager.com/docs/api-users/#show_user
        log.debug('Show user {}'.format(ID))
        return self.get('users/{}.json'.format(ID))
    def show_me(self):
        """Show the user the API authenticates as."""
        # http://teampasswordmanager.com/docs/api-users/#show_me
        log.debug('Show Info about own user')
        return self.get('users/me.json')
    def who_am_i(self):
        """Alias for show_me()."""
        return self.show_me()
    def create_user(self, data):
        """Create a User. Returns the new user's ID."""
        # http://teampasswordmanager.com/docs/api-users/#create_user
        log.info('Create user with {}'.format(data))
        NewID = self.post('users.json', data).get('id')
        log.info('User has been created with ID {}'.format(NewID))
        return NewID
    def update_user(self, ID, data):
        """Update a User."""
        # http://teampasswordmanager.com/docs/api-users/#update_user
        log.info('Update user {} with {}'.format(ID, data))
        self.put('users/{}.json'.format(ID), data)
    def change_user_password(self, ID, data):
        """Change password of a User."""
        # http://teampasswordmanager.com/docs/api-users/#change_password
        log.info('Change user {} password'.format(ID))
        self.put('users/{}/change_password.json'.format(ID), data)
    def activate_user(self, ID):
        """Activate a User."""
        # http://teampasswordmanager.com/docs/api-users/#activate_deactivate
        log.info('Activate user {}'.format(ID))
        self.put('users/{}/activate.json'.format(ID))
    def deactivate_user(self, ID):
        """Deactivate a User."""
        # http://teampasswordmanager.com/docs/api-users/#activate_deactivate
        log.info('Deactivate user {}'.format(ID))
        self.put('users/{}/deactivate.json'.format(ID))
    def convert_user_to_ldap(self, ID, DN):
        """Convert a normal user to a LDAP user."""
        # http://teampasswordmanager.com/docs/api-users/#convert_to_ldap
        data = {'login_dn': DN}
        log.info('Convert User {} to LDAP DN {}'.format(ID, DN))
        self.put('users/{}/convert_to_ldap.json'.format(ID), data)
    def convert_ldap_user_to_normal(self, ID):
        """Convert a LDAP user to a normal user."""
        log.info('Convert User {} from LDAP to normal user'.format(ID))
        self.put('users/{}/convert_to_normal.json'.format(ID))
    def delete_user(self, ID):
        """Delete a user."""
        # http://teampasswordmanager.com/docs/api-users/#delete_user
        log.info('Delete user {}'.format(ID))
        self.delete('users/{}.json'.format(ID))
    # -- Group endpoints --------------------------------------------------
    def list_groups(self):
        """List Groups."""
        # http://teampasswordmanager.com/docs/api-groups/#list_groups
        log.debug('List groups')
        return self.collection('groups.json')
    def show_group(self, ID):
        """Show a Group."""
        # http://teampasswordmanager.com/docs/api-groups/#show_group
        log.debug('Show group {}'.format(ID))
        return self.get('groups/{}.json'.format(ID))
    def create_group(self, data):
        """Create a Group. Returns the new group's ID."""
        # http://teampasswordmanager.com/docs/api-groups/#create_group
        log.info('Create group with {}'.format(data))
        NewID = self.post('groups.json', data).get('id')
        log.info('Group has been created with ID {}'.format(NewID))
        return NewID
    def update_group(self, ID, data):
        """Update a Group."""
        # http://teampasswordmanager.com/docs/api-groups/#update_group
        log.info('Update group {} with {}'.format(ID, data))
        self.put('groups/{}.json'.format(ID), data)
    def add_user_to_group(self, GroupID, UserID):
        """Add a user to a group."""
        # http://teampasswordmanager.com/docs/api-groups/#add_user
        log.info('Add User {} to Group {}'.format(UserID, GroupID))
        self.put('groups/{}/add_user/{}.json'.format(GroupID, UserID))
    def delete_user_from_group(self, GroupID, UserID):
        """Delete a user from a group."""
        # http://teampasswordmanager.com/docs/api-groups/#del_user
        log.info('Delete user {} from group {}'.format(UserID, GroupID))
        self.put('groups/{}/delete_user/{}.json'.format(GroupID, UserID))
    def delete_group(self, ID):
        """Delete a group."""
        # http://teampasswordmanager.com/docs/api-groups/#delete_group
        log.info('Delete group {}'.format(ID))
        self.delete('groups/{}.json'.format(ID))
    def generate_password(self):
        """Ask the server to generate a new random password."""
        # http://teampasswordmanager.com/docs/api-passwords-generator/
        log.debug('Generate new password')
        return self.get('generate_password.json')
    def get_version(self):
        """Get Version Information."""
        # http://teampasswordmanager.com/docs/api-version/
        log.debug('Get version information')
        return self.get('version.json')
    def get_latest_version(self):
        """Check for latest version."""
        # http://teampasswordmanager.com/docs/api-version/
        log.debug('Get latest version')
        return self.get('version/check_latest.json')
def up_to_date(self):
"""Check if Team Password Manager is up to date."""
VersionInfo = self.get_latest_version()
CurrentVersion = VersionInfo.get('version')
LatestVersion = VersionInfo.get('latest_version')
if CurrentVersion == LatestVersion:
log.info('TeamPasswordManager is up-to-date!')
log.debug('Current Version: {} Latest Version: {}'.format(LatestVersion, LatestVersion))
return True
else:
log.warning('TeamPasswordManager is not up-to-date!')
log.debug('Current Version: {} Latest Version: {}'.format(LatestVersion, LatestVersion))
return False
class TpmApiv3(TpmApi):
    """Client for version 3 of the Team Password Manager API."""
    # From now on, functions that only work with API v3.
    def __init__(self, url, **kwargs):
        super(TpmApiv3, self).__init__('v3', url, kwargs)
class TpmApiv4(TpmApi):
    """Client for version 4 of the Team Password Manager API."""
    # From now on, functions that only work with API v4.
    def __init__(self, url, **kwargs):
        super(TpmApiv4, self).__init__('v4', url, kwargs)

    def list_subprojects(self, ID):
        """Return all direct subprojects of project *ID*."""
        # http://teampasswordmanager.com/docs/api-projects/#list_subprojects
        log.debug('List subprojects of {}'.format(ID))
        return self.collection('projects/{}/subprojects.json'.format(ID))

    def list_subprojects_action(self, ID, action):
        """Return subprojects of *ID* on which *action* is allowed."""
        log.debug('List subprojects of {} with action: {}'.format(ID, action))
        return self.collection('projects/{}/subprojects/{}.json'.format(ID, action))
class TpmApiv5(TpmApiv4):
    """Client for version 5 of the Team Password Manager API."""
    def __init__(self, url, **kwargs):
        # NOTE: super() is deliberately anchored at TpmApiv4 so the MRO
        # skips TpmApiv4.__init__ (which has a different signature) and
        # reaches TpmApi.__init__(api, base_url, kwargs) directly.
        super(TpmApiv4, self).__init__('v5', url, kwargs)
    # From now on, functions that only work with API v5.
    def _upload_file(self, path, file, **kwargs):
        """Base64-encode *file* and POST it to *path*.

        Shared helper for upload_project_file / upload_password_file.
        Optional keyword 'notes' is passed through to the API.
        Returns the ID of the uploaded file; raises when *file* is missing.
        """
        if not os.path.isfile(file):
            raise Exception("File not found: {}".format(file))
        # Bug fix: the file handle was previously opened and never closed.
        with open(file, "rb") as file_data:
            encoded = base64.b64encode(file_data.read())
        data = {"file_data_base64": encoded.decode('ascii'),
                "file_name": os.path.basename(file)}
        if 'notes' in kwargs:
            data['notes'] = kwargs['notes']
        NewID = self.post(path, data).get('id')
        log.info('File has been uploaded with ID {}'.format(NewID))
        return NewID
    def list_project_files(self, ID):
        """List files of a project."""
        return self.collection('projects/{}/files.json'.format(ID))
    def upload_project_file(self, ID, file, **kwargs):
        """Upload a file to a project. Returns the new file ID."""
        return self._upload_file('projects/{}/upload.json'.format(ID),
                                 file, **kwargs)
    def archive_password(self, ID):
        """Archive a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#arch_unarch_password
        log.info('Archive password {}'.format(ID))
        self.put('passwords/{}/archive.json'.format(ID))
    def unarchive_password(self, ID):
        """Un-archive a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#arch_unarch_password
        log.info('Unarchive password {}'.format(ID))
        self.put('passwords/{}/unarchive.json'.format(ID))
    def move_password(self, ID, PROJECT_ID):
        """Move a password to another project."""
        # http://teampasswordmanager.com/docs/api-passwords/#move_password
        log.info('Move password {} to Project {}'.format(ID, PROJECT_ID))
        self.put('passwords/{}/move.json'.format(ID),
                 data={"project_id": PROJECT_ID})
    def list_password_files(self, ID):
        """List files of a password."""
        # http://teampasswordmanager.com/docs/api-passwords/#list_files
        log.info('List files of password: {}'.format(ID))
        return self.collection('passwords/{}/files.json'.format(ID))
    def upload_password_file(self, ID, file, **kwargs):
        """Upload a file to a password. Returns the new file ID."""
        return self._upload_file('passwords/{}/upload.json'.format(ID),
                                 file, **kwargs)
    def move_mypassword(self, ID, PROJECT_ID):
        """Move a mypassword to another project."""
        # https://teampasswordmanager.com/docs/api-my-passwords/#move_password
        log.info('Move my_password {} to Project {}'.format(ID, PROJECT_ID))
        return self.put('my_passwords/{}/move.json'.format(ID),
                        data={"project_id": PROJECT_ID}).get('id')
    def show_file_info(self, ID):
        """Show info of a file."""
        # https://teampasswordmanager.com/docs/api-files/#show_file
        log.info('Show info of file with ID: {}'.format(ID))
        return self.get('files/{}.json'.format(ID))
    def update_file_notes(self, ID, NOTES):
        """Update the notes on a file."""
        # https://teampasswordmanager.com/docs/api-files/#update_file
        log.info('Update notes on file {} to: {}'.format(ID, NOTES))
        self.put('files/{}.json'.format(ID), data={"notes": NOTES})
    def max_upload_file_size(self):
        """Show max upload file size."""
        # https://teampasswordmanager.com/docs/api-files/#max_upload_file_size
        log.info('Show max_upload_file_size')
        return self.get('files/max_upload_file_size.json')
    def uploads_folder_info(self):
        """Show uploads folder info."""
        # https://teampasswordmanager.com/docs/api-files/#uploads_folder
        log.info('Show uploads_folder_info')
        return self.get('files/uploads_folder_info.json')
    def download_file(self, ID):
        """Get the content of a file."""
        # https://teampasswordmanager.com/docs/api-files/#download_file
        log.info('Download file with ID: {}'.format(ID))
        return self.get('files/download/{}.json'.format(ID))
    def delete_file(self, ID):
        """Delete a file."""
        # https://teampasswordmanager.com/docs/api-files/#delete_file
        log.info('Delete file {}'.format(ID))
        self.delete('files/{}.json'.format(ID))
    def create_user_ldap(self, data):
        """Create a LDAP User. Returns the new user's ID."""
        # http://teampasswordmanager.com/docs/api-users/#create_user_ldap
        log.info('Create LDAP user with {}'.format(data))
        NewID = self.post('users_ldap.json', data).get('id')
        log.info('LDAP User has been created with ID {}'.format(NewID))
        return NewID
    def create_user_saml(self, data):
        """Create a SAML User. Returns the new user's ID."""
        # http://teampasswordmanager.com/docs/api-users/#create_user_saml
        log.info('Create SAML user with {}'.format(data))
        NewID = self.post('users_saml.json', data).get('id')
        log.info('SAML User has been created with ID {}'.format(NewID))
        return NewID
    def convert_user_to_ldap(self, ID, DN, SERVER_ID):
        """Convert a normal user to a LDAP user (v5 also requires SERVER_ID)."""
        # http://teampasswordmanager.com/docs/api-users/#convert_to_ldap
        data = {'login_dn': DN, "ldap_server_id": SERVER_ID}
        log.info('Convert User {} to LDAP DN {} at Server {}'.format(
            ID, DN, SERVER_ID))
        self.put('users/{}/convert_to_ldap.json'.format(ID), data)
    def convert_user_to_saml(self, ID):
        """Convert a normal user to a SAML user."""
        # http://teampasswordmanager.com/docs/api-users/#convert_to_saml
        log.info('Convert User {} to SAML'.format(ID))
        self.put('users/{}/convert_to_saml.json'.format(ID))
|
peshay/tpm
|
tpm.py
|
Python
|
mit
| 32,755
|
# -*- coding: utf-8 -*-
import ast
import os
import requests
import models
from config import config, sqla
from gevent.pool import Pool
from helpers import random_str, down
# Photos are stored under <photo.path>/celebrity/<category>/.
base_path = config.get('photo', 'path')
base_path = os.path.join(base_path, 'celebrity')
# Shared cookie jar; 'bid' is refreshed with a random value before each
# download (douban expects a bid cookie).
cookies = {
    'bid': ''
}
def create_down(str_urls, douban_id, category):
    """Download every photo URL in *str_urls* into base_path/<category>/."""
    target_dir = os.path.join(base_path, category)
    for photo_url in ast.literal_eval(str_urls or "[]"):
        name = '{}_{}'.format(douban_id, photo_url.split('/')[-1].strip('?'))
        cookies['bid'] = random_str(11)
        down(photo_url, cookies, target_dir, name)
def create_requests_and_save_datas(douban_id):
    """Download cover, thumbnail cover and all photos for one celebrity."""
    session = sqla['session']
    cookies['bid'] = random_str(11)
    celebrity = session.query(models.Celebrity).filter_by(
        douban_id=douban_id
    ).one()
    cover_url = celebrity.cover
    thumbnail_cover_url = celebrity.thumbnail_cover
    photos_url = celebrity.photos
    thumbnail_photos_url = celebrity.thumbnail_photos
    down(
        cover_url,
        cookies,
        os.path.join(base_path, 'cover'),
        str(douban_id)+'_'+cover_url.split('/')[-1].strip('?')
    )
    down(
        thumbnail_cover_url,
        cookies,
        os.path.join(base_path, 'thumbnail_cover'),
        # NOTE(review): filename is derived from cover_url rather than
        # thumbnail_cover_url — presumably both share the same basename on
        # douban's CDN; confirm, otherwise use thumbnail_cover_url here.
        str(douban_id)+'_'+cover_url.split('/')[-1].strip('?')
    )
    create_down(photos_url, douban_id, 'photos')
    create_down(thumbnail_photos_url, douban_id, 'thumbnail_photos')
def task(douban_ids, pool_number):
    """Download images for each douban_id using a gevent pool."""
    pool = Pool(pool_number)
    for current_id in douban_ids:
        pool.spawn(create_requests_and_save_datas, douban_id=current_id)
    pool.join()
|
billvsme/videoSpider
|
webs/douban/tasks/down_celebrity_images.py
|
Python
|
mit
| 1,702
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
# Copyright 2012 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:module: watchdog.observers.inotify
:synopsis: ``inotify(7)`` based emitter implementation.
:author: Sebastien Martini <seb@dbzteam.org>
:author: Luke McCarthy <luke@iogopro.co.uk>
:author: yesudeep@google.com (Yesudeep Mangalapilly)
:author: Tim Cuthbertson <tim+github@gfxmonk.net>
:platforms: Linux 2.6.13+.
.. ADMONITION:: About system requirements
Recommended minimum kernel version: 2.6.25.
Quote from the inotify(7) man page:
"Inotify was merged into the 2.6.13 Linux kernel. The required library
interfaces were added to glibc in version 2.4. (IN_DONT_FOLLOW,
IN_MASK_ADD, and IN_ONLYDIR were only added in version 2.5.)"
Therefore, you must ensure the system is running at least these versions
appropriate libraries and the kernel.
.. ADMONITION:: About recursiveness, event order, and event coalescing
Quote from the inotify(7) man page:
If successive output inotify events produced on the inotify file
descriptor are identical (same wd, mask, cookie, and name) then they
are coalesced into a single event if the older event has not yet been
read (but see BUGS).
The events returned by reading from an inotify file descriptor form
an ordered queue. Thus, for example, it is guaranteed that when
renaming from one directory to another, events will be produced in
the correct order on the inotify file descriptor.
...
Inotify monitoring of directories is not recursive: to monitor
subdirectories under a directory, additional watches must be created.
This emitter implementation therefore automatically adds watches for
sub-directories if running in recursive mode.
Some extremely useful articles and documentation:
.. _inotify FAQ: http://inotify.aiken.cz/?section=inotify&page=faq&lang=en
.. _intro to inotify: http://www.linuxjournal.com/article/8478
"""
from __future__ import with_statement
import os
import threading
from .inotify_buffer import InotifyBuffer
from watchdog.observers.api import (
EventEmitter,
BaseObserver,
DEFAULT_EMITTER_TIMEOUT,
DEFAULT_OBSERVER_TIMEOUT
)
from watchdog.events import (
DirDeletedEvent,
DirModifiedEvent,
DirMovedEvent,
DirCreatedEvent,
FileDeletedEvent,
FileModifiedEvent,
FileMovedEvent,
FileCreatedEvent,
generate_sub_moved_events,
generate_sub_created_events,
)
from watchdog.utils import unicode_paths
class InotifyEmitter(EventEmitter):
    """
    inotify(7)-based event emitter.
    :param event_queue:
        The event queue to fill with events.
    :param watch:
        A watch object representing the directory to monitor.
    :type watch:
        :class:`watchdog.observers.api.ObservedWatch`
    :param timeout:
        Read events blocking timeout (in seconds).
    :type timeout:
        ``float``
    """
    def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
        EventEmitter.__init__(self, event_queue, watch, timeout)
        # Serialises queue_events() calls so reads from the inotify
        # buffer never interleave.
        self._lock = threading.Lock()
        self._inotify = None
    def on_thread_start(self):
        # InotifyBuffer expects a bytes path.
        path = unicode_paths.encode(self.watch.path)
        self._inotify = InotifyBuffer(path, self.watch.is_recursive)
    def on_thread_stop(self):
        if self._inotify:
            self._inotify.close()
    def queue_events(self, timeout, full_events=False):
        # If "full_events" is true, unmatched move events are reported as
        # separate events; by default this is only done by InotifyFullEmitter.
        with self._lock:
            event = self._inotify.read_event()
            if event is None:
                return
            # A tuple is a matched (IN_MOVED_FROM, IN_MOVED_TO) pair: emit
            # one move event plus dir-modified events for both parent dirs.
            if isinstance(event, tuple):
                move_from, move_to = event
                src_path = self._decode_path(move_from.src_path)
                dest_path = self._decode_path(move_to.src_path)
                cls = DirMovedEvent if move_from.is_directory else FileMovedEvent
                self.queue_event(cls(src_path, dest_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
                self.queue_event(DirModifiedEvent(os.path.dirname(dest_path)))
                if move_from.is_directory and self.watch.is_recursive:
                    for sub_event in generate_sub_moved_events(src_path, dest_path):
                        self.queue_event(sub_event)
                return
            src_path = self._decode_path(event.src_path)
            # An unmatched IN_MOVED_TO: either a half-move (full_events) or,
            # from this watch's perspective, a creation.
            if event.is_moved_to:
                if (full_events):
                    cls = DirMovedEvent if event.is_directory else FileMovedEvent
                    self.queue_event(cls(None, src_path))
                else:
                    cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
                    self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
                if event.is_directory and self.watch.is_recursive:
                    for sub_event in generate_sub_created_events(src_path):
                        self.queue_event(sub_event)
            elif event.is_attrib:
                # Metadata change (permissions, timestamps) → modified.
                cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
                self.queue_event(cls(src_path))
            elif event.is_modify:
                cls = DirModifiedEvent if event.is_directory else FileModifiedEvent
                self.queue_event(cls(src_path))
            elif event.is_delete or (event.is_moved_from and not full_events):
                # An unmatched IN_MOVED_FROM looks like a deletion unless
                # half-moves are reported explicitly.
                cls = DirDeletedEvent if event.is_directory else FileDeletedEvent
                self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
            elif event.is_moved_from and full_events:
                cls = DirMovedEvent if event.is_directory else FileMovedEvent
                self.queue_event(cls(src_path, None))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
            elif event.is_create:
                cls = DirCreatedEvent if event.is_directory else FileCreatedEvent
                self.queue_event(cls(src_path))
                self.queue_event(DirModifiedEvent(os.path.dirname(src_path)))
    def _decode_path(self, path):
        """ Decode path only if unicode string was passed to this emitter. """
        if isinstance(self.watch.path, bytes):
            return path
        return unicode_paths.decode(path)
class InotifyFullEmitter(InotifyEmitter):
    """
    inotify(7)-based event emitter that always reports move events, even
    when only one half of the move pair was observed. The unmatched side
    of such an event is reported as ``None``.
    :param event_queue:
        The event queue to fill with events.
    :param watch:
        A watch object representing the directory to monitor.
    :type watch:
        :class:`watchdog.observers.api.ObservedWatch`
    :param timeout:
        Read events blocking timeout (in seconds).
    :type timeout:
        ``float``
    """
    def __init__(self, event_queue, watch, timeout=DEFAULT_EMITTER_TIMEOUT):
        super(InotifyFullEmitter, self).__init__(event_queue, watch, timeout)

    def queue_events(self, timeout, events=True):
        # Delegate with full_events enabled so unmatched moves are kept.
        super(InotifyFullEmitter, self).queue_events(timeout, full_events=events)
class InotifyObserver(BaseObserver):
    """
    Observer thread that schedules watching directories and dispatches
    calls to event handlers.
    """
    def __init__(self, timeout=DEFAULT_OBSERVER_TIMEOUT, generate_full_events=False):
        # Choose the emitter up front; the full emitter also reports
        # unmatched move halves.
        emitter_cls = InotifyFullEmitter if generate_full_events else InotifyEmitter
        BaseObserver.__init__(self, emitter_class=emitter_cls, timeout=timeout)
|
wandb/client
|
wandb/vendor/watchdog/observers/inotify.py
|
Python
|
mit
| 8,528
|
# Python3
# Ad-hoc test harness data for the "Multiplication Table" task: each entry
# is (n, expected n-by-n multiplication table, where cell [i][j] == (i+1)*(j+1)).
from solution1 import multiplicationTable as f
qa = [
    (5,
     [[1, 2, 3, 4, 5],
      [2, 4, 6, 8, 10],
      [3, 6, 9, 12, 15],
      [4, 8, 12, 16, 20],
      [5, 10, 15, 20, 25]]),
    (2,
     [[1, 2],
      [2, 4]]),
    (4,
     [[1, 2, 3, 4],
      [2, 4, 6, 8],
      [3, 6, 9, 12],
      [4, 8, 12, 16]]),
    (10,
     [[ 1,  2,  3,  4,  5,  6,  7,  8,  9,  10],
      [ 2,  4,  6,  8, 10, 12, 14, 16, 18,  20],
      [ 3,  6,  9, 12, 15, 18, 21, 24, 27,  30],
      [ 4,  8, 12, 16, 20, 24, 28, 32, 36,  40],
      [ 5, 10, 15, 20, 25, 30, 35, 40, 45,  50],
      [ 6, 12, 18, 24, 30, 36, 42, 48, 54,  60],
      [ 7, 14, 21, 28, 35, 42, 49, 56, 63,  70],
      [ 8, 16, 24, 32, 40, 48, 56, 64, 72,  80],
      [ 9, 18, 27, 36, 45, 54, 63, 72, 81,  90],
      [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]]),
    (15,
     [[ 1,  2,  3,  4,  5,  6,  7,  8,  9,  10,  11,  12,  13,  14,  15],
      [ 2,  4,  6,  8, 10, 12, 14, 16, 18,  20,  22,  24,  26,  28,  30],
      [ 3,  6,  9, 12, 15, 18, 21, 24, 27,  30,  33,  36,  39,  42,  45],
      [ 4,  8, 12, 16, 20, 24, 28, 32, 36,  40,  44,  48,  52,  56,  60],
      [ 5, 10, 15, 20, 25, 30, 35, 40, 45,  50,  55,  60,  65,  70,  75],
      [ 6, 12, 18, 24, 30, 36, 42, 48, 54,  60,  66,  72,  78,  84,  90],
      [ 7, 14, 21, 28, 35, 42, 49, 56, 63,  70,  77,  84,  91,  98, 105],
      [ 8, 16, 24, 32, 40, 48, 56, 64, 72,  80,  88,  96, 104, 112, 120],
      [ 9, 18, 27, 36, 45, 54, 63, 72, 81,  90,  99, 108, 117, 126, 135],
      [10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120, 130, 140, 150],
      [11, 22, 33, 44, 55, 66, 77, 88, 99, 110, 121, 132, 143, 154, 165],
      [12, 24, 36, 48, 60, 72, 84, 96, 108, 120, 132, 144, 156, 168, 180],
      [13, 26, 39, 52, 65, 78, 91, 104, 117, 130, 143, 156, 169, 182, 195],
      [14, 28, 42, 56, 70, 84, 98, 112, 126, 140, 154, 168, 182, 196, 210],
      [15, 30, 45, 60, 75, 90, 105, 120, 135, 150, 165, 180, 195, 210, 225]])
]
# Run every (inputs..., expected) case, echo the inputs, and report ok/failed.
for *inputs, expected in qa:
    for idx, arg in enumerate(inputs, start=1):
        print('input{0}: {1}'.format(idx, arg))
    result = f(*inputs)
    if result == expected:
        print(' [ok]')
        print(' output:', result)
    else:
        print(' [failed]')
        print(' output:', result)
        print(' expected:', expected)
    print()
|
RevansChen/online-judge
|
Codefights/arcade/python-arcade/level-5/34.Multiplication-Table/Python/test.py
|
Python
|
mit
| 2,320
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Ver 18 - 15 November 2017 -
import time
import serial
import string
import sys
import mysql.connector
from mysql.connector import errorcode, pooling
from db import *
import datetime
#from threading import Thread
import multiprocessing as mp
from multiprocessing import Queue
from multiprocessing.managers import SyncManager
from os import system, devnull
from subprocess import call, STDOUT
from threading import Thread
from time import sleep
#import queue
ctrlStr = "*../"  # NOTE(review): sentinel/control marker -- appears unused in this file, confirm
HOST = ''         # empty host: connect to the SyncManager servers on the local machine
PORT0 = 5011      # qIn  - frames received from the coordinator
PORT1 = 5012      # qOut - frames to be written to the coordinator
PORT2 = 5013      # qSql - SQL work queue
PORT3 = 5014      # qResp - coordinator command responses (currently unused)
AUTHKEY = str("123456").encode("utf-8")  # shared secret for the SyncManager connections
def output(o, x):
print(str(str(o) + " " + str(datetime.datetime.now().time())[:8]) + " "+ str(x))
sys.stdout.flush()
# -- DB Connection ---------------------------
# Module-level side effect: the MySQL connection is opened at import time
# using the `config` mapping imported from db.py.
try:
    db = mysql.connector.connect(**config)
except mysql.connector.Error as err:
    if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        output("DB", "Something is wrong with your user name or password")
    elif err.errno == errorcode.ER_BAD_DB_ERROR:
        output("DB", "Database does not exists")
    else:
        # NOTE(review): output() takes two positional args; this one-arg call
        # would itself raise TypeError -- confirm and fix upstream.
        output(err)
else:
    output("PYSERIAL","Start procedure")
    db.commit()
# -- END DB Connection ---------------------------
# -- Open Serial to the Coordinator---------------
# Module-level side effect: the coordinator serial port is opened at import time.
serCoord = serial.Serial('/dev/ttymxc3', 115200, timeout=10)
#serCoord = serial.Serial('COM5', 115200, timeout=5)
serCoord.timeout = 10
serCoord.setDTR(False)  # NOTE(review): presumably avoids resetting the board on open -- confirm
time.sleep(1)
# toss any data already received, see
serCoord.flushInput()
# -- End Open Serial to the Coordinator-----------
#-----------------------------
# Global Variable declaration
#-----------------------------
endSerialChars = b"\r\n"  # line terminator used by the coordinator protocol
global readSerial         # NOTE(review): `global` at module level is a no-op
global serialBuffer
pnum = 5 #number of values to send for each sensor
# coordinator commands (prefixes completed by initCoordinator())
INString = "IN" # to send Node data to the coordinator
ISString = "IS" # to send Sensor data to the coordinator
IXString = "IX" # to send Address data to the coordinator
IAString = "IA" # to send Actuators to the coordinator
IMString = "IM" # to send Methods to the coordinator
# Coordinator acknowledgement lines (with and without CRLF terminator).
CommExecutedTrue = b"CX1\r\n"
CommExecutedFalse = b"CX0\r\n"
CommExecutedTrueX = b"CX1"
CommExecutedFalseX = b"CX0"
CommNotExecuted = b"X"
#-----------------------------
# End Global Variable declaration
#-----------------------------
# Gpio pin manager
class Gpio:
    """Thin wrapper around the sysfs GPIO interface (/sys/class/gpio).

    Exports the pins listed in ``self.gpios`` at construction time and
    caches each pin's last known value/direction so redundant sysfs
    writes can be skipped.
    """

    def __init__(self):
        self.gpios = ["55", "57"]   # sysfs GPIO numbers managed by this object
        self.gpioval = [0, 0]       # cached pin values (0/1)
        self.gpiodir = [0, 0]       # cached directions (0=input, 1=output)
        self.current = 0
        self.OUTPUT = 1
        self.INPUT = 0
        self.HIGH = 1
        self.LOW = 0
        for num in self.gpios:
            try:
                with open("/sys/class/gpio/export", "w") as create:
                    create.write(num)
                # BUG FIX: the original indexed with the undefined bare name
                # `current` (a NameError silently swallowed by a bare except);
                # use self.current so the cached state is actually populated.
                with open("/sys/class/gpio/gpio" + self.gpios[self.current] + "/value", "r") as reads:
                    # BUG FIX: store an int (the file yields e.g. "0\n"), so the
                    # comparisons against int(value) in digitalWrite stay sane.
                    self.gpioval[self.current] = int(reads.read())
                with open("/sys/class/gpio/gpio" + self.gpios[self.current] + "/direction", "r") as readdir:
                    self.gpiodir[self.current] = (1 if "out" in readdir.read() else 0)
                self.current += 1
            except (OSError, ValueError):
                # Pin missing or not exportable: keep the cached defaults.
                sleep(0.000001)

    def pinMode(self, pin=0, direction=0):
        """Set a pin's direction (0=input, 1=output). Returns True on success."""
        try:
            gpio = self.gpios[int(pin)]
            if int(direction) != self.gpiodir[pin]:
                with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as writer:
                    writer.write("in" if direction < 1 else "out")
                self.gpiodir[pin] = (0 if direction < 1 else 1)
            # BUG FIX: the original only returned True when the direction
            # changed; a no-op request implicitly returned None.
            return True
        except ValueError:
            output("PYSERIAL","ERROR: pinMode, value inserted wasn't an int")
            return False
        except Exception:
            output("PYSERIAL","ERROR: pinMode, error using pinMode")
            return False

    def digitalWrite(self, pin=2, value=0):
        """Drive a pin high/low, forcing it to output first. True on success."""
        try:
            gpio = self.gpios[int(pin)]
            if self.gpiodir[pin] != 1:
                with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as re:
                    re.write("out")
                self.gpiodir[pin] = 1
            if self.gpioval[pin] != int(value):
                with open("/sys/class/gpio/gpio" + gpio + "/value", "w") as writes:
                    writes.write("0" if value < 1 else "1")
                self.gpioval[pin] = (0 if value < 1 else 1)
            # BUG FIX: return True also when the cached value already matched.
            return True
        except ValueError:
            output("PYSERIAL","ERROR: digitalWrite, value inserted wasn't an int")
            return False
        except Exception:
            output("PYSERIAL","ERROR: digitalWrite, error running")
            return False

    def digitalRead(self, pin=2):
        """Read a pin (forcing it to input first); returns 0/1, or -1 on error."""
        try:
            gpio = self.gpios[int(pin)]
            if self.gpiodir[pin] != 0:
                with open("/sys/class/gpio/gpio" + gpio + "/direction", "w") as re:
                    re.write("in")
                self.gpiodir[pin] = 0
            with open("/sys/class/gpio/gpio" + gpio + "/value", "r") as reader:
                self.gpioval[pin] = int(reader.read().replace('\n', ''))
            return self.gpioval[pin]
        except ValueError:
            output("PYSERIAL","ERROR: digitalRead, value inserted wasn't an int")
            return -1
        except Exception:
            output("PYSERIAL","ERROR: digitalRead, error running")
            return -1
#-- function to extract integer from strings
def parseint(string):
    """Return the integer formed by every digit character found in *string*."""
    digits = (ch for ch in string if ch.isdigit())
    return int("".join(digits))
def log(t, m):
    """Record a (type, message) row in tblog.

    NOTE(review): the actual DB write is entirely commented out, so this
    is currently a no-op that only builds the SQL string -- confirm
    whether logging should be re-enabled.
    """
    #curLog = db.cursor()
    sql = "insert into tblog (type,msg) VALUES (%s, %s)"
    #try:
    #curLog.execute(sql, (t,m))
    #db.commit()
    #curLog.close()
    #except:
    #raise
    #curLog.close()
def printTime():
    """Print the current local time as 'HH MM SS microseconds' (debug helper)."""
    print(datetime.datetime.now().strftime("%H %M %S %f"))
def checkInit():
    """Poll tbparam for the init flag (ptype='I', pindex row with value 1);
    when set, clear the flag and re-run the coordinator initialization.
    """
    # BUG FIX: the original used a global `cur` that is never defined
    # anywhere in this module (NameError at runtime); open a local cursor.
    cur = db.cursor()
    sql = "SELECT pvalue,pindex FROM tbparam WHERE ptype = 'I'"
    cur.execute(sql)
    for (pvalue, pindex) in cur:
        i = int("{}".format(pindex))
        if i == 1:
            output("PYSERIAL", "Initialize Coordinator")
            sql = "UPDATE tbparam SET pvalue = 0 WHERE ptype = 'I'"
            cur.execute(sql)
            db.commit()
            initCoordinator()
            break
    # BUG FIX: `cur.close` was a bare attribute access; actually close it.
    cur.close()
    sys.stdout.flush()
    # end check Init
#-- Send Init data to the Coordinator --#
def initCoordinator():
    """Build the coordinator init strings (IN/IS/IX/IA/IM) from the DB and
    send them to the coordinator over serial.

    Each string is "<prefix><n*pnum>,<row fields>..." where pnum is the
    per-item parameter count. Returns 1 when every string is acknowledged,
    0 as soon as any send fails.
    """
    #printTime()
    output("PYSERIAL","Initializing...")
    global pnum
    global INString
    global IXString
    global ISString
    global IAString
    global IMString
    cur = db.cursor()
    #----begin building strings to send out----------------------------------#
    # count the number of nodes
    sql = "select count(*) as CNT from vwnodes WHERE nodetype != 0" #exclude external node
    cur.execute(sql)
    for (CNT) in cur:
        # NOTE(review): (CNT) is NOT a tuple unpack -- CNT is the whole row
        # tuple and parseint() digs the digits out of its string form.
        nodeNum=parseint("{}".format(CNT))
        INString = INString + str(nodeNum*pnum)
    sql = "select count(*) as CNT from vwnodes WHERE nodetype = 2" #xbee nodes
    cur.execute(sql)
    for (CNT) in cur:
        nodeNum=parseint("{}".format(CNT))
        IXString = IXString + str(nodeNum)
    # retrieve node data and buid initialization strings
    sql = "select id, xbee_high_address, xbee_low_address, nodetype from vwnodes WHERE nodetype != 0 AND status = 1 order by id"
    cur.execute(sql)
    for (id, xbee_high_address, xbee_low_address, nodetype) in cur:
        INString = INString + "," + "{}".format(id) + "," + "{}".format(nodetype) + ",0,0,1"
        if int("{}".format(nodetype)) == 2: #xbee
            IXString = IXString + "," + "{}".format(id) + "," + "{}".format(xbee_high_address) + "," + "{}".format(xbee_low_address)
    #db.commit()
    # count the number of sensors
    sql = "select count(*) as CNT from vwsensors where tbNodeType_id != 0 and pin_number < 30"
    cur.execute(sql)
    for (CNT) in cur:
        sensorNum=parseint("{}".format(CNT))
        ISString = ISString + str(sensorNum*pnum)
    db.commit()
    #//col 0=node 1=sensor 2=value 3=alarm 4=spare
    #retrieve sensor data and build initialization strings
    sql = "SELECT nodeid,tbnodetype_id,tbsensortype_id,pin_number FROM vwsensors where tbnodetype_id != 0 and pin_number < 30 and tbstatus_id = 1 order by nodeid,pin_number"
    cur.execute(sql)
    for (nodeid,tbnodetype_id,tbsensortype_id,pin_number) in cur:
        ISString = ISString + "," + "{}".format(nodeid) + "," + "{}".format(pin_number) + ",0,0,0"
    #db.commit()
    # count the number of actuators
    sql = "select count(*) as CNT from vwactuator"
    cur.execute(sql)
    for (CNT) in cur:
        actuatorNum=parseint("{}".format(CNT))
        IAString = IAString + str(actuatorNum*pnum)
    db.commit()
    #//col 0=node 1=sensor 2=value 3=alarm 4=spare
    #retrieve actuator data and build initialization strings
    sql = "select tbnode_id,pinnumber from tbactuator order by tbnode_id,pinnumber"
    cur.execute(sql)
    for (tbnode_id,pinnumber) in cur:
        IAString = IAString + "," + "{}".format(tbnode_id) + "," + "{}".format(pinnumber) + ",0,0,0"
    # count the number of methods
    sql = "select count(*) as CNT from vwmethods"
    cur.execute(sql)
    for (CNT) in cur:
        methodNum=parseint("{}".format(CNT))
        IMString = IMString + str(methodNum*pnum)
    db.commit()
    #//col 0=node 1=actuator 2=method 3=value 4=spare
    #retrieve method data and build initialization strings
    sql = "select tbnode_id,pinnumber,method from vwmethods order by tbnode_id,pinnumber,method"
    cur.execute(sql)
    for (tbnode_id,pinnumber,method) in cur:
        IMString = IMString + "," + "{}".format(tbnode_id) + "," + "{}".format(pinnumber) + "," + "{}".format(method) + ",0,0"
    db.commit()
    # BUG FIX: was `cur.close` (bare attribute access, cursor never closed).
    cur.close()
    #----end building strings to send out------------------------------------#
    #----begin sending init strings to the coordinator-----------------------#
    output("PYSERIAL","Init sensors")
    ret = initSendStringsToCoordinator(ISString)
    if ret == 0: #if fails
        return 0
    output("PYSERIAL","Init actuators")
    #output(IAString)
    ret = initSendStringsToCoordinator(IAString)
    if ret == 0: #if fails
        return 0
    output("PYSERIAL","Init methods")
    ret = initSendStringsToCoordinator(IMString)
    if ret == 0: #if fails
        return 0
    output("PYSERIAL","Init nodes")
    ret = initSendStringsToCoordinator(INString)
    if ret == 0: #if fails
        return 0
    output("PYSERIAL","Init node addresses Xbee")
    ret = initSendStringsToCoordinator(IXString)
    if ret == 0: #if fails
        return 0
    #----end sending init strings to the coordinator-------------------------#
    # if Ok (the cursor was already closed above; the original repeated the
    # uncalled `cur.close` here)
    output("PYSERIAL","End Initializing")
    return 1
def isResponse(response):
    """True when the serial line *response* (bytes) carries a coordinator
    status code (CX0 failure or CX1 success)."""
    decoded = str(response, 'utf-8')
    return "CX0" in decoded or "CX1" in decoded
def isResponseOK(response):
    """Return True only when the coordinator acknowledged success (CX1).

    CX0 (explicit failure) and anything unrecognized both yield False.
    The original version was littered with leftover debug prints; those
    have been removed.
    """
    text = str(response, 'utf-8')
    # CX0 takes priority, mirroring the original check order.
    if "CX0" in text:
        return False
    return "CX1" in text
#--------------------------------------------------------------------------------------------------------#
#---- get serial incoming data ---------------------------------------------------------------------#
#--------------------------------------------------------------------------------------------------------#
def getSerialData(qIN, qOUT, qResponse):
    """Main serial pump; loops forever.

    Each iteration: read one line from the coordinator serial port when
    data is pending -- coordinator CX0/CX1 acks are recognized and dropped
    (the qResponse hand-off is commented out), everything else goes to
    qIN -- then drain qOUT, writing each queued string to the port.
    GPIO pin 0 is driven high while a line is being handled.
    """
    output("PYSERIAL","init serial")
    serCoord.flushInput()
    readSerial = ""
    serCoord.timeout = 1
    while True:
        gpio.digitalWrite(0,gpio.LOW)  # pin 0 low while idle
        serialBuffer = serCoord.inWaiting()
        if serialBuffer > 0: #data available on serial
            gpio.digitalWrite(0,gpio.HIGH)  # pin 0 high while handling a line
            readSerial = serCoord.readline()
            # NOTE(review): rstrip() returns a new value that is discarded
            # here, so the CRLF terminator is NOT actually removed -- confirm.
            readSerial.rstrip(endSerialChars)
            if isResponse(readSerial) == True:
                # while not qResponse.empty():
                #     qResponse.get()
                #qResponse.put(readSerial)
                #output("Response received")
                aa=1  # placeholder so the otherwise commented-out branch stays valid
            else:
                qIN.put(readSerial)
        # print("Data received:", serialBuffer)
        #print("Q size:", qIn.qsize())
        while not qOUT.empty():
            #print("Q OUT size:", qOUT.qsize())
            stg = qOUT.get()
            serCoord.write(bytes(stg, 'UTF-8'))
            output("PYSERIAL","String sent: " + str(stg))
def initSendStringsToCoordinator(stg):
    """Send one init string to the coordinator and wait for its CX ack.

    Retries while nothing has arrived on serial, up to 5 attempts.
    Returns 1 on acknowledged success (CX1), 0 on failure/timeout.
    """
    serCoord.flushInput()
    # send the node string
    attemptsCnt = 0
    while serCoord.inWaiting() == 0 and attemptsCnt < 5:
        ret = serCoord.write(bytes(stg, 'UTF-8'))
        readSerial = serCoord.readline()
        if readSerial == CommExecutedTrue:
            # Acknowledged. (The original had an unreachable time.sleep/break
            # after this return; removed.)
            return 1
        elif readSerial == CommExecutedFalse:
            # NOTE(review): an explicit CX0 does not bump attemptsCnt, so
            # repeated failures can loop while the port stays empty -- confirm.
            # write error in log
            log("E", "Error "+stg)
        else:
            attemptsCnt = attemptsCnt + 1
            #output("PYSERIAL",attemptsCnt)
            continue
    # write error in log
    log("E", "no serial available")
    return 0
def QueueServerClient(HOST, PORT, AUTHKEY):
    """Connect to a remote SyncManager queue server and return the
    connected manager proxy."""
    class QueueManager(SyncManager):
        pass
    # Expose the same remote accessors the server registered.
    for accessor in ('get_queue', 'get_name', 'get_description'):
        QueueManager.register(accessor)
    manager = QueueManager(address=(HOST, PORT), authkey=AUTHKEY)
    manager.connect()  # This starts the connected client
    return manager
#------- Main section ----------------------------#
#------- Run once --------------------------------#
log("I", "Initialize coordinator")
gpio = Gpio()
gpio.pinMode(0, gpio.OUTPUT)  # pin 0 is toggled by getSerialData() while handling data
ret = 0
curInit = db.cursor()
#truncate output tables
curInit.callproc('init')
curInit.close()
# create three connected managers
# NOTE(review): four managers are actually created (in/out/sql/resp).
qmIn = QueueServerClient(HOST, PORT0, AUTHKEY)
qmOut = QueueServerClient(HOST, PORT1, AUTHKEY)
qmSql = QueueServerClient(HOST, PORT2, AUTHKEY)
qmResp = QueueServerClient(HOST, PORT3, AUTHKEY)
# Get the queue objects from the clients
qIn = qmIn.get_queue()
qOut = qmOut.get_queue()
qSql = qmSql.get_queue()
qResp = qmResp.get_queue()
# Reset the init string prefixes and run the coordinator initialization.
while ret == 0:
    INString = "IN" # to send Node data to the coordinator
    ISString = "IS" # to send Sensor data to the coordinator
    IXString = "IX" # to send Address data to the coordinator
    IAString = "IA" # to send Actuators data to the coordinator
    IMString = "IM" # to send Methods data to the coordinator
    ret = initCoordinator()
    ret = 1  # NOTE(review): forces a single pass even if initCoordinator() returned 0 -- confirm intended
#------- End run once -------------------------#
log("I", "Start main loop")
getSerialData(qIn, qOut, qResp)
|
theflorianmaas/dh
|
Python/dhproc/p_serial.py
|
Python
|
mit
| 15,394
|
import os
from flask import current_app
from flask.cli import FlaskGroup, run_command
from opsy.db import db
from opsy.app import create_app, create_scheduler
from opsy.utils import load_plugins
DEFAULT_CONFIG = '%s/opsy.ini' % os.path.abspath(os.path.curdir)


def create_opsy_app(info):
    """App factory used by the FlaskGroup CLI; the *info* argument is unused."""
    config_path = os.environ.get('OPSY_CONFIG', DEFAULT_CONFIG)
    return create_app(config=config_path)
# Root CLI group; default Flask commands are disabled and registered explicitly.
cli = FlaskGroup(create_app=create_opsy_app, # pylint: disable=invalid-name
                 add_default_commands=False,
                 help='The Opsy management cli.')
cli.add_command(run_command)  # keep Flask's built-in `run` command available
@cli.command('run-scheduler')
def run_scheduler():
    """Run the scheduler until interrupted."""
    sched = create_scheduler(current_app)
    try:
        current_app.logger.info('Starting the scheduler')
        sched.start()
    except (KeyboardInterrupt, SystemExit):
        # Graceful stop on Ctrl-C / process termination.
        sched.shutdown()
        current_app.logger.info('Stopping the scheduler')
@cli.command('shell')
def shell():
    """Run a shell in the app context."""
    from flask.globals import _app_ctx_stack
    banner = 'Welcome to Opsy!'
    app = _app_ctx_stack.top.app
    # Seed the namespace with the app factories and the DB handle.
    context = {'create_app': create_app,
               'create_scheduler': create_scheduler,
               'db': db}
    # Let every plugin contribute its own names, then add Flask's.
    for plugin in load_plugins(current_app):
        plugin.register_shell_context(context)
    context.update(app.make_shell_context())
    try:
        # Prefer IPython when it is installed.
        from IPython import embed
        embed(user_ns=context, banner1=banner)
        return
    except ImportError:
        import code
        code.interact(banner, local=context)
@cli.command('init-cache')
def init_cache():
    """Drop everything in cache database and rebuild the schema.

    Destructive: all tables bound to the 'cache' database are dropped
    before being recreated, then the session is committed.
    """
    current_app.logger.info('Creating cache database')
    db.drop_all(bind='cache')
    db.create_all(bind='cache')
    db.session.commit()
def main():
    """CLI entry point: build the app, let each plugin register its
    commands, then dispatch to the click group."""
    with create_opsy_app(None).app_context():
        for plugin in load_plugins(current_app):
            plugin.register_cli_commands(cli)
        cli()
|
derekmoyes/opsy
|
opsy/shell.py
|
Python
|
mit
| 1,995
|
"""
sprintkit.services
==================
Implementations of most Developer Sandbox Services.
:Copyright: (c) 2011 by Sprint.
:License: MIT, see LICENSE for more details.
"""
from ConfigParser import SafeConfigParser
from datetime import datetime
from hashlib import md5
import json
import os
import time
import urlparse
import uuid
from restkit import Resource
from restkit.errors import RequestError, RequestTimeout, ResourceError
from sprintkit import errors
from sprintkit.gps import Coordinates, Gps2dFix
class Config(dict):
    '''Reads configuration information for the Sandbox API gateway.

    :Parameters:
        * path (string) - The path to your config file (default=None).

    A dict subclass holding Sandbox configuration. When no `path` is
    given it looks for `sprintkit.conf` in the current working directory
    first, then falls back to `$HOME/.sprintkit.conf`.

    The config file is ini format, for example::

        [sprintkit]
        key = <sprint_developer_key>
        secret = <sprint_developer_secret>
        host = test.sprintdevelopersandbox.com
        path = /developerSandbox/resources/v1

    When both SPRINTKEY and SPRINTSECRET environment variables are set
    they override the key/secret read from the file. Changes made to an
    instance programmatically are not written back to disk.
    '''

    def __init__(self, path=None):
        if path is None:  # idiom fix: was `path == None`
            home_dir = os.path.expanduser('~')
            run_dir = os.getcwd()
            default_runpath = os.path.join(run_dir, "sprintkit.conf")
            default_homepath = os.path.join(home_dir, ".sprintkit.conf")
            if os.path.exists(default_runpath):
                self.path = default_runpath
            else:
                self.path = default_homepath
        else:
            self.path = path

    def load(self):
        """Read the configuration file from `self.path` into this dict.

        :Raises: (:class:`sprintkit.errors.SprintkitError`) - If the
            config file could not be found.
        """
        if not os.path.exists(self.path):
            raise errors.SprintkitError('Could not find configuration file: %s' % self.path)
        config = {}
        # `with` guarantees the handle is closed (the original leaked it).
        with open(self.path, 'r') as config_file:
            parser = SafeConfigParser()
            parser.readfp(config_file)
            config.update(parser.items('sprintkit'))
        # `in` works on Python 2 and 3; dict.has_key() is Python 2 only.
        if 'SPRINTKEY' in os.environ and 'SPRINTSECRET' in os.environ:
            config['key'] = os.environ['SPRINTKEY']
            config['secret'] = os.environ['SPRINTSECRET']
        self.update(config)
        return self
class SandboxResource(Resource):
    """A class that manages connections to Sandbox Resources.

    Sub-class this to add support for new Sandbox resources not yet
    available in SprintKit. SandboxResource is a sub-class of a restkit
    Resource, so it accepts all its parameters.
    """

    def __init__(self, config=None, **kwargs):
        if config is None:
            # No config supplied: load credentials from the default location.
            self.config = Config()
            self.config.load()
        else:
            self.config = config
        self.api_url = urlparse.urlunparse((self.config['protocol'],
                                            self.config['host'],
                                            self.config['path'], '', '', ''))
        super(SandboxResource, self).__init__(self.api_url,
                                              follow_redirect=True,
                                              max_follow_redirect=10, **kwargs)

    def parse_response(self, response):
        """Parse a restkit Response payload into a json data dict.

        :Parameters: response (:class:`restkit.wrappers.Response`) - Response
        :Returns: (dict) - The raw Sandbox JSON data.
        :Raises: :class:`sprintkit.errors.ParsingError`
        """
        # BUG FIX: the original wrapped body_string() in the same bare
        # except, so a read failure referenced the unbound `body` and raised
        # NameError instead of ParsingError. Read first, then decode.
        body = response.body_string()
        try:
            data = json.loads(body)
        except ValueError:
            raise errors.ParsingError("Malformed JSON data", body)
        return data

    def parse_errors(self, data):
        """Raise SandboxError when the Sandbox reported an error payload.

        :Parameters: data (dict) - The raw Sandbox JSON data.
        :Raises: :class:`sprintkit.errors.SandboxError`
        """
        if 'error' in data:
            raise errors.SandboxError(data['error'])

    def sign_params(self, params, secret):
        """Build a dict of URL parameters and add a sig.

        :Parameters:
            * params (dict) - Dictionary of URL query param key/val pairs
            * secret (str) - The API Secret used to create signature.
        :Returns: (dict) - The parameters with a signature added.

        .. note::
            See http://goo.gl/Wu7T5 for the signature scheme. The values
            MUST NOT be url quoted before generating the signature.
        """
        # Refresh the timestamp placeholder when the caller requested one.
        if 'timestamp' in params:
            params['timestamp'] = self.make_timestamp()
        # Stringify all values.
        for key, val in params.items():
            params[key] = str(val)
        # Drop any stale signature before computing the new one.
        if 'sig' in params:
            del params['sig']
        # key/value pairs concatenated in sorted-key order, then the secret.
        pairs = ["%s%s" % (key, params[key]) for key in sorted(params.keys())]
        rawsig = "".join(pairs) + secret
        # encode() so hashing works on Python 2 and 3 (md5 requires bytes).
        params['sig'] = md5(rawsig.encode('utf-8')).hexdigest()
        return params

    def make_timestamp(self):
        """Generate an API timestamp string.

        :Returns: (string) - "[YYYY]-[MM]-[DD]T[HH]:[MM]:[SS]UTC".

        .. note::
            The sandbox REST APIs require a timestamp parameter to help
            prevent replay attacks; this formats the current UTC time.
        """
        tnow = datetime.utcnow().replace(microsecond=0)  # drop microseconds
        tzone = "UTC"
        timestamp = datetime.isoformat(tnow) + tzone
        return timestamp
class SMS(SandboxResource):
    """A Resource used to send SMS messages."""

    def send(self, mdns, msg):
        """Sends an SMS text message to a device or list of devices.

        :Parameters:
            * mdns (string) - The MDN(s) to send the message to: a valid
              10-digit MDN or a comma separated list, e.g.
              "0005551111" or "0005551111,0005551212".
            * msg (string) - The text message (160 characters).
        :Returns: (dict) - The raw JSON Sandbox data, e.g.::

            {'MessagingResponse':
                [{'status': 'S', 'tranno': 'e6d6bd9',
                  'mdn': '9995551212', 'gcode': '1000'}]}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`
        """
        params = {'mdns': mdns,
                  'msg': msg,
                  'timestamp': True,
                  'key': self.config['key'],
                  'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('sms.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        # We only report the first unexpected top-level key as an error.
        errs = [k for k in data if k != 'MessagingResponse']
        if errs:
            raise errors.SandboxError(errs[0])
        if 'MessagingResponse' not in data:
            raise errors.ParsingError("Missing a MessagingResponse",
                                      response)
        return data
class Presence(SandboxResource):
    """A Resource to check if an MDN is reachable on the network.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.
    """

    def get_presence(self, mdn):
        """Get the presence status of an MDN.

        :Parameters: mdn (string) - The MDN to check for reachability.
        :Returns: (dict) - The raw Sandbox JSON data.
        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`
        """
        params = {'mdn': mdn,
                  'timestamp': True,
                  'key': self.config['key'],
                  'sig': True}
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('presence.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data

    def reachable(self, mdn):
        """Check if an MDN is reachable (convenience over get_presence()).

        :Parameters: mdn (string) - The MDN to check for reachability.
        :Returns: (bool) - True if the `mdn` is reachable.
        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`
        """
        data = self.get_presence(mdn)
        try:
            status = data['status']
        except KeyError as e:
            # BUG FIX: the original formatted the unbound `status` here,
            # raising NameError instead of ParsingError.
            raise errors.ParsingError("KeyError: '%s'." % e, data)
        if status not in ('Reachable', 'Unreachable'):
            raise errors.ParsingError("ValueError: 'status' is incorrect.",
                                      data)
        return (status == 'Reachable')
class Location(SandboxResource):
    """A Resource for getting a location fix for an MDN.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.
    """

    def get_location(self, mdn):
        """Fetch the raw JSON location payload for *mdn*.

        :Parameters: mdn (string) - The MDN to get a location fix for (10 digits).
        :Returns: (dict) - The raw Sandbox location data in JSON format.
        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`
        """
        query = {'mdn': mdn,
                 'timestamp': True,
                 'key': self.config['key'],
                 'sig': True}
        query = self.sign_params(query, self.config['secret'])
        try:
            reply = self.get('location.json', params_dict=query)
        except (RequestError, RequestTimeout) as exc:
            raise errors.ConnectionError(str(exc))
        payload = self.parse_response(reply)
        self.parse_errors(payload)
        return payload

    def locate(self, mdn):
        """Return the device position as a :class:`sprintkit.gps.Gps2dFix`.

        :Parameters: mdn (string) - The MDN to get a location fix for (10 digits).
        :Returns: (:class:`sprintkit.gps.Gps2dFix`) - carries the fix time,
            the coordinates and the 'hepe' accuracy estimate; lat/lon are
            available via ``fix.coordinates``.
        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.SandboxError`
            * :class:`sprintkit.errors.ParsingError`
        """
        payload = self.get_location(mdn)
        try:
            latitude = float(payload['lat'])
            longitude = float(payload['lon'])
            hepe = int(payload['accuracy'])
        except KeyError as exc:
            raise errors.ParsingError("Missing %s" % exc, payload)
        except ValueError as exc:
            raise errors.ParsingError(exc, payload)
        point = Coordinates((latitude, longitude))
        return Gps2dFix(datetime.now(), point, errors={'hepe': hepe})
class Perimeter(SandboxResource):
"""A class used for checking if an mdn is within a geographic area
specified by its Coordinates and a radius in meters.
:Parameters:
* coordinates (:class:`sprintkit.gps.Coordinates`, or tuple) - The
center lat/lon of the perimeter.
* radius (integer) - Radius of the perimeter in meters.
.. note::
The typical usage for Perimeter would be to create a perimeter based on
a set of center coordinates and radius, then call its methods to check
if devices are within the perimeter.
"""
def __init__(self, coordinates, radius, config=None, **kwargs):
self.coordinates = Coordinates(coordinates)
self.radius = radius
super(Perimeter, self).__init__(config, **kwargs)
def get_perimeter(self, mdn):
"""Check if an mdn is inside this Perimeter.
:Parameters: mdn (string): The mdn of the device to check
:Returns: (dict) - The raw Sandbox JSON data.
:Raises:
* :class:`sprintkit.errors.ConnectionError`
* :class:`sprintkit.errors.SandboxError`
* :class:`sprintkit.errors.ParsingError`
"""
lat = repr(self.coordinates.latitude)
lon = repr(self.coordinates.longitude)
rad = str(self.radius)
params = {'mdn': mdn,
'lat': lat,
'long': lon,
'rad': rad,
'timestamp': True,
'key': self.config['key'],
'sig': True}
params = self.sign_params(params, self.config['secret'])
try:
response = self.get('geofence/checkPerimeter.json', params_dict=params)
except (RequestError, RequestTimeout) as e:
raise errors.ConnectionError(str(e))
data = self.parse_response(response)
self.parse_errors(data)
return data
def inside(self, mdn):
"""Returns True if the mdn is inside this Perimeter.
:Parameters: mdn (string): The mdn of the device to check the perimeter for.
:Returns: (bool) - True if mdn is inside the perimeter, False otherwise.
:Raises:
* :class:`sprintkit.errors.ConnectionError`
* :class:`sprintkit.errors.SandboxError`
* :class:`sprintkit.errors.ParsingError`
.. note::
This method provides a simple perimeter check, if you also
need to get the coordinates of the device at the same time
as you check the perimeter, use the `get_perimeter` method
instead.
"""
data = self.get_perimeter(mdn)
try:
status = data['CurrentLocation']
except KeyError as e:
raise errors.ParsingError("Missing the CurrentLocation field", data)
if status != 'INSIDE' and status != 'OUTSIDE':
raise errors.ParsingError("ValueError for CurrentLocation", data)
return (status == 'INSIDE')
def check(self, mdn):
"""Check if an MDN is inside this Perimeter (a convenience
method).
:Parameters: mdn (string): The mdn of the device to check the perimeter for.
:Returns: (tuple) - (bool, :class:`sprintkit.gps.Gps2dFix`)
:Raises:
* :class:`sprintkit.errors.ConnectionError`
* :class:`sprintkit.errors.SandboxError`
* :class:`sprintkit.errors.ParsingError`
.. note::
This convenience method returns a tuple (inside, fix). The
boolean `inside` is True if the device is inside the fence
and `fix` contains :class:`sprintkit.gps.Gps2dFix` which has
all of the pertinent location information. To get the
lat/lon use the coordinates attribute of Gps2dFix::
lat = Gps2dFix.coordinates.lattitude
lon = Gps2dFix.coordinates.longitude
(lat, lon) = Gps2dFix.coordinates
"""
data = self.get_perimeter(mdn)
timestamp = datetime.now()
try:
lat = float(data['Latitude'])
lon = float(data['Longitude'])
accuracy = float(data['Accuracy'])
status = data['CurrentLocation']
except KeyError as e:
raise errors.ParsingError("Missing %s" % e, data)
except ValueError as e:
raise errors.ParsingError(e, data)
coord = Coordinates((lat,lon))
fix = Gps2dFix(timestamp, coord, errors={'hepe':accuracy})
try:
status = data['CurrentLocation']
except KeyError as e:
raise errors.ParsingError("Missing the CurrentLocation field", data)
if status != 'INSIDE' and status != 'OUTSIDE':
raise errors.ParsingError("ValueError for CurrentLocation", data)
inside = (status == 'INSIDE')
return (inside, fix)
def distance_to(self, mdn):
    """Calculate the distance from this Perimeter to the device `mdn`.

    :Parameters:
        * mdn (string) - The device MDN to calculate distance to.
    :Returns: (int) - The distance to the MDN in meters.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`

    .. note::
        The Sandbox is only used to locate the device (via `locate()`);
        the distance itself is computed locally from the two coordinate
        pairs using the haversine formula.
    """
    device_position = self.locate(mdn).coordinates
    # Coordinates subtraction yields the haversine distance in meters.
    return self.coordinates - device_position
class Fence(SandboxResource):
    """A Sandbox Resource for modifying geofences.

    :Parameters:
        * config (:class:`Config`) - The Sandbox configuration.
        * fenceid (integer): A unique number for identifying the fence.
        * name (string): A text name for the fence.
        * coordinates (:class:`sprintkit.gps.Coordinates`): The coordinates for the center of the fence.
        * radius (integer): The radius of fence in meters.
        * days (string): The days of week to monitor fence [SMTWHFA].
        * start_time (string): The time when fence becomes active "HHMM".
        * end_time (string): The time with fence becomes inactive "HHMM".

    .. note::
        This object is not intended to be instantiated by the end user
        directly, instead it is returned when calling the GeoFence.fences()
        method.
    """
    def __init__(self, fenceid, name, coordinates, radius, days, start_time,
                 end_time, status, config=None, **kwargs):
        # Identity and geometry of the fence as reported by the Sandbox.
        self.fenceid = fenceid
        self.name = name
        self.coordinates = coordinates
        self.radius = radius
        # Schedule: which days ([SMTWHFA]) and the daily active window.
        self.days = days
        self.start_time = start_time
        self.end_time = end_time
        # Fence state string, e.g. 'active' / 'inactive' (lowercased by
        # GeoFence.fences() when it builds these objects).
        self.status = status
        # The base class keeps the config used for signing and transport.
        super(Fence, self).__init__(config, **kwargs)
def activate(self):
    """Activate this Fence on the Sandbox.

    On success, the local `status` attribute is switched to 'active'.

    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    signed = self.sign_params(
        {'fenceId': self.fenceid,
         'timestamp': True,
         'key': self.config['key'],
         'sig': True},
        self.config['secret'])
    try:
        response = self.get('geofence/activate.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    data = self.parse_response(response)
    self.parse_errors(data)
    # Guard-clause style: bail out early on malformed / error payloads.
    if 'Message' not in data:
        raise errors.ParsingError("Missing a `Message` field.", data)
    message = data['Message']
    if message != 'FENCE_ACTIVATED':
        raise errors.GeoFenceError(message)
    self.status = 'active'
    return data
def deactivate(self):
    """De-activate this Fence on the Sandbox.

    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'fenceId': self.fenceid,
             'timestamp': True,
             'key': self.config['key'],
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('geofence/deactivate.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
def get_devices(self):
    """Return the raw list of devices monitored by this fence.

    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'fenceId': self.fenceid,
             'timestamp': True,
             'key': self.config['key'],
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('geofence/listDevices.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
def devices(self):
    """Return the devices associated with this fence (a convenience
    method).

    :Returns: (dict) - Maps an mdn (string) to a deviceid (integer) for
        each device monitored within this geofence, e.g.::

            devices = {"1115551212": 102}

    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    result = self.get_devices()
    device_list = result['Device']
    devices = {}
    for device in device_list:
        # A record carrying a 'Message' key appears to be the Sandbox's
        # "no more devices" sentinel; the original returned early here
        # and that behavior is preserved. (`in` replaces dict.has_key(),
        # which was removed in Python 3.)
        if 'Message' in device:
            return devices
        devices[device['MDN']] = int(device['DeviceID'])
    return devices
def add_device(self, mdn):
    """Add a device to be monitored inside this Fence.

    :Parameters: mdn (string) - The mdn of the device to be monitored.
    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
        * :class:`sprintkit.errors.GeoFenceError` - if the Sandbox did
          not report DEVICE_ADDED.
    """
    params = {'fenceId': self.fenceid,
              'mdn': mdn,
              'timestamp': True,
              'key': self.config['key'],
              'sig': True}
    params = self.sign_params(params, self.config['secret'])
    try:
        response = self.get('geofence/addDevice.json', params_dict=params)
    except (RequestError, RequestTimeout) as e:
        raise errors.ConnectionError(str(e))
    data = self.parse_response(response)
    self.parse_errors(data)
    try:
        message = data['Message']
    except KeyError:
        # Consistency fix: include the offending payload, like every
        # other ParsingError site in this module does.
        raise errors.ParsingError("Missing a `Message` field.", data)
    if message != 'DEVICE_ADDED':
        raise errors.GeoFenceError(message)
    return data
def delete_device(self, mdn):
    """Delete a device associated with this Fence.

    :Parameters: mdn (string) - The mdn of the device to be removed from monitoring.
    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
        * :class:`sprintkit.errors.GeoFenceError` - DEVICE_NOTFOUND if
          the mdn is not monitored by this fence.

    .. note::
        The Sandbox does not provide a method to remove a device from a
        fence using its mdn, so we first call devices() to resolve the
        deviceid associated with the mdn.
    """
    devices = self.devices()
    try:
        deviceid = devices[mdn]
    except KeyError:
        raise errors.GeoFenceError("DEVICE_NOTFOUND")
    params = {'deviceId': deviceid,
              'timestamp': True,
              'key': self.config['key'],
              'sig': True}
    params = self.sign_params(params, self.config['secret'])
    try:
        response = self.get('geofence/deleteDevice.json', params_dict=params)
    except (RequestError, RequestTimeout) as e:
        raise errors.ConnectionError(str(e))
    data = self.parse_response(response)
    self.parse_errors(data)
    try:
        message = data['Message']
    except KeyError:
        # Consistency fix: include the offending payload, like every
        # other ParsingError site in this module does.
        raise errors.ParsingError("Missing a `Message` field.", data)
    if message != 'DEVICE_DELETED':
        raise errors.GeoFenceError(message)
    return data
def get_recipients(self):
    """Return the raw list of recipients for this fence's notifications.

    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'fenceId': self.fenceid,
             'timestamp': True,
             'key': self.config['key'],
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('geofence/listRecipients.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
def recipients(self):
    """Map each notification recipient to its id (a convenience method).

    The keys are mdnurl strings (an MDN or a URL events are sent to) and
    the values are integer recipient ids, e.g.::

        recipients = {"1115551212": 105}

    :Returns: (dict)
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    result = self.get_recipients()
    recipients = {}
    recipient_list = result['Recipient']
    for recipient in recipient_list:
        # A record without MDNURL/RecipientID ends the list and we return
        # what was collected so far (preserves the original early-return
        # behavior). The original used a bare `except:`, which also hid
        # unrelated errors such as KeyboardInterrupt.
        try:
            recipients[recipient['MDNURL']] = int(recipient['RecipientID'])
        except (KeyError, TypeError, ValueError):
            return recipients
    return recipients
def add_recipient(self, recipient):
    """Register a recipient for this Fence's notification events.

    :Parameters: recipient (string) - Either an MDN or a URL.
    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'fenceId': self.fenceid,
             'mdnURL': recipient,
             'timestamp': True,
             'key': self.config['key'],
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('geofence/addRecipient.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
def delete_recipient(self, recipient):
    """Delete a recipient of a geofence notification.

    :Parameters: recipient (string) - Either an MDN or a URL.
    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
        * :class:`sprintkit.errors.GeoFenceError` - UNKNOWN_RECIPIENT if
          the recipient is not registered on this fence.
    """
    recipients = self.recipients()
    # `not in` replaces dict.has_key(), which was removed in Python 3.
    if recipient not in recipients:
        raise errors.GeoFenceError("UNKNOWN_RECIPIENT")
    recipientid = recipients[recipient]
    params = {'recipientId': recipientid,
              'timestamp': True,
              'key': self.config['key'],
              'sig': True}
    params = self.sign_params(params, self.config['secret'])
    try:
        response = self.get('geofence/deleteRecipient.json', params_dict=params)
    except (RequestError, RequestTimeout) as e:
        raise errors.ConnectionError(str(e))
    data = self.parse_response(response)
    self.parse_errors(data)
    return data
class GeoFence(SandboxResource):
    """A SandboxResource to retrieve and create geofences.

    :Parameters: config (:class:`Config`) - The Sandbox configuration.
    """
    def get_fences(self):
        """Fetch every geofence defined for this Sandbox user account.

        :Returns: (dict) - The raw Sandbox JSON data, e.g.::

            {u'Fence': [{u'Status': u'Inactive',
                         u'FenceID': u'139',
                         u'Name': u'test',
                         u'Days': u'W',
                         u'Longitude': u'-94.1234',
                         u'StartTime': u'1100',
                         u'Latitude': u'38.1234',
                         u'LastMonitorTime': u'NEVER',
                         u'EndTime': u'2200',
                         u'Dimensions': u'2000'}]}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`
        """
        query = {'timestamp': True,
                 'key': self.config['key'],
                 'sig': True}
        signed = self.sign_params(query, self.config['secret'])
        try:
            response = self.get('geofence/list.json', params_dict=signed)
        except (RequestError, RequestTimeout) as err:
            raise errors.ConnectionError(str(err))
        payload = self.parse_response(response)
        self.parse_errors(payload)
        return payload
def fences(self, match=None):
    """Get all of the geofences associated with a Sandbox user account.

    :Parameters:
        * match - (int or string) - The `fenceid` or `name` of a fence
          to filter on; with no `match`, every fence is returned.
    :Returns: (list) - A list of :class:`Fence` objects.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`

    .. note::
        A string `match` is compared against the fence name, an integer
        `match` against the fenceid.
        NOTE(review): the Sandbox returns names as unicode, and on
        Python 2 a unicode `match` fails isinstance(match, str), so
        filtering by a unicode name silently matches nothing — confirm
        whether callers ever pass unicode.
    """
    fences = []
    data = self.get_fences()
    try:
        for fence in data['Fence']:
            # A 'Message' record means the account has no fences.
            if 'Message' in fence.keys():
                return []
            fenceid = int(fence['FenceID'])
            name = fence['Name']
            coordinates = Coordinates((float(fence['Latitude']),
                                       float(fence['Longitude'])))
            radius = int(fence['Dimensions'])
            days = fence['Days']
            start_time = fence['StartTime']
            end_time = fence['EndTime']
            status = fence['Status'].lower()
            if match:
                # Filter by name (string) or fenceid (integer); the
                # original duplicated the Fence(...) construction in
                # both branches — build it once below instead.
                matched = ((isinstance(match, str) and match == name) or
                           (isinstance(match, int) and match == fenceid))
                if not matched:
                    continue
            fences.append(Fence(fenceid, name, coordinates, radius, days,
                                start_time, end_time, status, self.config))
    except KeyError as e:
        raise errors.ParsingError("KeyError '%s'." % e, data)
    return fences
def add_fence(self, name, start_time, end_time, coordinates,
        radius, interval, days, notify_event):
    """Add a fence to a Sandbox user account.

    :Parameters:
        * name (string) - A name to give this geofence.
        * start_time (string) - Time when the fence becomes active "HHMM"
        * end_time (string) - Time when the fence becomes inactive "HHMM"
        * coordinates (:class:`sprintkit.gps.Coordinates`) - The lat/lon
          center of the fence.
        * radius (integer) - Radius of fence in meters.
        * interval (integer) - How often to check the fence (in 5 minute increments).
        * days (string) - Days of week to check the fence.
        * notify_event (string) - What event triggers a notification.

    .. note::
        The `days` parameter is a string that corresponds to which days of the
        week that a fence will be active. Each day of the week is represented
        by a letter, and these letters can be concatenated::

            sunday = 'S'
            monday = 'M'
            tuesday = 'T'
            wednesday = 'W'
            thursday = 'H'
            friday = 'F'
            saturday = 'A'
            days = sunday + wednesday + friday
            days = "SWF" #Active on days Sunday, Wednesday and Friday

        The `start_time` and `end_time` parameters are strings that represent
        what time a fence will become active and what time it will become
        inactive. The string is in the format "HHMM" where HH is the 24-hour
        time (00-23) where 00 is midnight. MM is the minutes (00-59).

        The `notify_event` parameter specifies whether the fence should
        notify on 'in', 'out' or 'both' events.

    :Returns: (:class:`sprintkit.services.Fence`) - The Fence that was
        added.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    # Accept raw (lat, lon) tuples as well as Coordinates instances.
    coordinates = Coordinates(coordinates)
    # 'strtTime' (sic) is the Sandbox API's spelling, not a typo here.
    params = {'name': name,
            'strtTime': start_time,
            'endTime': end_time,
            'lat': repr(coordinates.latitude),
            'long': repr(coordinates.longitude),
            'dim': radius,
            'interval': interval,
            'days': days,
            'notifyEvent': notify_event,
            'timestamp': True,
            'key': self.config['key'],
            'sig': True}
    params = self.sign_params(params, self.config['secret'])
    try:
        response = self.get('geofence/add.json', params_dict=params)
    except (RequestError, RequestTimeout) as e:
        raise errors.ConnectionError(str(e))
    data = self.parse_response(response)
    self.parse_errors(data)
    # NOTE(review): this endpoint uses a lowercase 'message' key, unlike
    # the 'Message' key used elsewhere — presumably a Sandbox quirk;
    # verify against a live response.
    if data['message'] == 'FENCE_ADDED':
        fenceid = int(data['ID'])
        # Re-fetch the fence list to return a fully-populated Fence.
        fence = [fence for fence in self.fences() if fence.fenceid == fenceid]
        if len(fence) == 1:
            return fence[0]
        else:
            raise errors.GeoFenceError("FENCE_NOTADDED")
    else:
        raise errors.GeoFenceError(data['message'])
def delete_fence(self, fence):
    """Delete a geofence from this account.

    :Parameters: fence (:class:`sprintkit.services.Fence`) - A Fence object.
    :Returns: (dict) - The raw Sandbox JSON data.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'fenceId': fence.fenceid,
             'timestamp': True,
             'key': self.config['key'],
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('geofence/delete.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
class Account(SandboxResource):
    """A class for configuring devices associated with a developer account.

    :Parameters:
        * config (dict) - A :class:`Config` instance (default=None).
    """
    def get_devices(self, status=None, mdn=None):
        """Retrieve devices associated with this developer account.

        :Optional Parameters:
            * status (string) - The authorization status criteria to filter
              on: 'p' pending, 'a' approved, 'x' declined, 'd' deleted.
            * mdn (string) - Return status for this single MDN only.
        :Returns: (dict) - The raw JSON Sandbox data, e.g.::

            {"username": "your_username",
             "devices": {
                 "approved": ["1115551212", "1115551213"],
                 "declined": [],
                 "pending": [],
                 "deleted": ["1115551234"]},
             "authStatus": "Declined"}

        :Raises:
            * :class:`sprintkit.errors.ConnectionError`
            * :class:`sprintkit.errors.ParsingError`
            * :class:`sprintkit.errors.SandboxError`
        """
        params = {'key': self.config['key'],
                  'timestamp': True,
                  'sig': True}
        # BUG FIX: the optional filters were stored under the filter
        # *value* (params[status] = status), so the Sandbox never
        # received 'status'/'mdn' query parameters. Use the literal
        # parameter names as keys.
        if status:
            params['status'] = status
        if mdn:
            params['mdn'] = mdn
        params = self.sign_params(params, self.config['secret'])
        try:
            response = self.get('devices.json', params_dict=params)
        except (RequestError, RequestTimeout) as e:
            raise errors.ConnectionError(str(e))
        data = self.parse_response(response)
        self.parse_errors(data)
        return data
def add_device(self, mdn):
    """Add a device to this developer account.

    :Parameters: mdn (string) - The MDN to add to this account.
    :Returns: (dict) - The raw JSON Sandbox data; ``{u'response':
        u'SUCCESS'}`` on success, ``{u'response': u'FAILED'}`` otherwise.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'method': 'add',
             'mdn': mdn,
             'key': self.config['key'],
             'timestamp': True,
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('device.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
def delete_device(self, mdn):
    """Delete a device from this developer account.

    :Parameters: mdn (string) - The MDN to remove from this account.
    :Returns: (dict) - The raw JSON Sandbox data; ``{u'response':
        u'SUCCESS'}`` on success.
    :Raises:
        * :class:`sprintkit.errors.ConnectionError`
        * :class:`sprintkit.errors.ParsingError`
        * :class:`sprintkit.errors.SandboxError`
    """
    query = {'method': 'delete',
             'mdn': mdn,
             'key': self.config['key'],
             'timestamp': True,
             'sig': True}
    signed = self.sign_params(query, self.config['secret'])
    try:
        response = self.get('device.json', params_dict=signed)
    except (RequestError, RequestTimeout) as err:
        raise errors.ConnectionError(str(err))
    payload = self.parse_response(response)
    self.parse_errors(payload)
    return payload
|
ericem/sprintkit
|
src/sprintkit/services.py
|
Python
|
mit
| 44,648
|
import sys
import os
import cv2
import numpy as np
class Detector:
    """Abstract base class: subclasses implement detect(src) -> regions/mask."""
    def detect(self, src):
        raise NotImplementedError("Every Detector must implement the detect method.")


class SkinDetector(Detector):
    """
    Implements common color thresholding rules for the RGB, YCrCb and HSV color
    space. The values are taken from a paper, which I can't find right now, so
    be careful with this detector.
    """

    def _R1(self, BGR):
        """Skin rule in RGB space; expects a BGR-ordered image array."""
        B = BGR[:, :, 0]
        G = BGR[:, :, 1]
        R = BGR[:, :, 2]
        # e1: skin thresholds for normal illumination.
        # NOTE(review): on uint8 input R - G wraps on underflow before
        # np.abs; in e1 the R > G term masks this, but in e2 a small
        # G > R difference can wrap and fail abs(R-G) <= 15 — confirm
        # whether callers rely on that behavior.
        e1 = (R > 95) & (G > 40) & (B > 20) & (
            (np.maximum(R, np.maximum(G, B)) - np.minimum(R, np.minimum(G, B))) > 15) & (np.abs(R - G) > 15) & (
            R > G) & (
            R > B)
        # e2: skin thresholds for strong (flash-like) illumination.
        e2 = (R > 220) & (G > 210) & (B > 170) & (abs(R - G) <= 15) & (R > B) & (G > B)
        return (e1 | e2)

    def _R2(self, YCrCb):
        """Skin rule in YCrCb space: five half-plane constraints on (Cr, Cb)."""
        # The Y channel is not used by any constraint (unused local removed).
        Cr = YCrCb[:, :, 1]
        Cb = YCrCb[:, :, 2]
        e1 = Cr <= (1.5862 * Cb + 20)
        e2 = Cr >= (0.3448 * Cb + 76.2069)
        e3 = Cr >= (-4.5652 * Cb + 234.5652)
        e4 = Cr <= (-1.15 * Cb + 301.75)
        e5 = Cr <= (-2.2857 * Cb + 432.85)
        return e1 & e2 & e3 & e4 & e5

    def _R3(self, HSV):
        """Skin rule in HSV space: hue near red (S and V are unused)."""
        H = HSV[:, :, 0]
        return ((H < 25) | (H > 230))

    def detect(self, src):
        """Return a uint8 mask with 1 where all three color-space rules agree.

        Grayscale or non-uint8 input yields an all-ones mask (no filtering).
        """
        if np.ndim(src) < 3:
            return np.ones(src.shape, dtype=np.uint8)
        if src.dtype != np.uint8:
            return np.ones(src.shape, dtype=np.uint8)
        srcYCrCb = cv2.cvtColor(src, cv2.COLOR_BGR2YCR_CB)
        srcHSV = cv2.cvtColor(src, cv2.COLOR_BGR2HSV)
        skinPixels = self._R1(src) & self._R2(srcYCrCb) & self._R3(srcHSV)
        return np.asarray(skinPixels, dtype=np.uint8)
class CascadedDetector(Detector):
    """
    Uses the OpenCV cascades to perform the detection. Returns the Regions of Interest, where
    the detector assumes a face. You probably have to play around with the scaleFactor,
    minNeighbors and minSize parameters to get good results for your use case. From my
    personal experience, all I can say is: there's no parameter combination which *just
    works*.
    """
    def __init__(self, cascade_fn="./cascades/haarcascade_frontalface_alt2.xml", scaleFactor=1.2, minNeighbors=5,
                 minSize=(30, 30)):
        # Fail fast on a bad path: cv2.CascadeClassifier would otherwise
        # silently produce an empty classifier.
        if not os.path.exists(cascade_fn):
            raise IOError("No valid cascade found for path=%s." % cascade_fn)
        self.cascade = cv2.CascadeClassifier(cascade_fn)
        self.scaleFactor = scaleFactor
        self.minNeighbors = minNeighbors
        self.minSize = minSize

    def detect(self, src):
        """Return detected regions as (x0, y0, x1, y1) corner boxes.

        Color input is converted to grayscale and histogram-equalized
        before running the cascade.
        """
        if np.ndim(src) == 3:
            src = cv2.cvtColor(src, cv2.COLOR_BGR2GRAY)
        src = cv2.equalizeHist(src)
        rects = self.cascade.detectMultiScale(src, scaleFactor=self.scaleFactor, minNeighbors=self.minNeighbors,
                                              minSize=self.minSize)
        if len(rects) == 0:
            return []
        # detectMultiScale yields (x, y, w, h); convert widths/heights to
        # absolute bottom-right corners in place.
        rects[:, 2:] += rects[:, :2]
        return rects
class SkinFaceDetector(Detector):
    """
    Uses the SkinDetector to accept only faces over a given skin color tone threshold (ignored for
    grayscale images). Be careful with skin color tone thresholding, as it won't work in uncontrolled
    scenarios (without preprocessing)!
    """
    def __init__(self, threshold=0.3, cascade_fn="./cascades/haarcascade_frontalface_alt2.xml", scaleFactor=1.2,
                 minNeighbors=5, minSize=(30, 30)):
        self.faceDetector = CascadedDetector(cascade_fn=cascade_fn, scaleFactor=scaleFactor,
                                             minNeighbors=minNeighbors, minSize=minSize)
        self.skinDetector = SkinDetector()
        self.threshold = threshold

    def detect(self, src):
        """Return face rects whose skin-pixel ratio exceeds self.threshold."""
        rects = []
        # The original iterated with enumerate() but never used the index,
        # and contained a leftover Python 2 `print skinPercentage` debug
        # statement (a SyntaxError on Python 3) — both removed.
        for r in self.faceDetector.detect(src):
            x0, y0, x1, y1 = r
            face = src[y0:y1, x0:x1]
            skinPixels = self.skinDetector.detect(face)
            skinPercentage = float(np.sum(skinPixels)) / skinPixels.size
            if skinPercentage > self.threshold:
                rects.append(r)
        return rects
if __name__ == "__main__":
    # script parameters: detector.py <input image> [<output image>]
    if len(sys.argv) < 2:
        raise Exception("No image given.")
    inFileName = sys.argv[1]
    outFileName = None
    if len(sys.argv) > 2:
        outFileName = sys.argv[2]
        # Refuse to overwrite the input image in place.
        if outFileName == inFileName:
            outFileName = None
    # detection begins here
    img = np.array(cv2.imread(inFileName), dtype=np.uint8)
    imgOut = img.copy()
    # set up detectors
    # detector = SkinFaceDetector(threshold=0.3, cascade_fn="/home/philipp/projects/opencv2/OpenCV-2.3.1/data/haarcascades/haarcascade_frontalface_alt2.xml")
    detector = CascadedDetector(
        cascade_fn="/home/philipp/projects/opencv2/OpenCV-2.3.1/data/haarcascades/haarcascade_frontalface_alt2.xml")
    eyesDetector = CascadedDetector(scaleFactor=1.1, minNeighbors=5, minSize=(20, 20),
                                    cascade_fn="/home/philipp/projects/opencv2/OpenCV-2.3.1/data/haarcascades/haarcascade_eye.xml")
    # detection
    for i, r in enumerate(detector.detect(img)):
        x0, y0, x1, y1 = r
        cv2.rectangle(imgOut, (x0, y0), (x1, y1), (0, 255, 0), 1)
        face = img[y0:y1, x0:x1]
        for j, r2 in enumerate(eyesDetector.detect(face)):
            ex0, ey0, ex1, ey1 = r2
            # Eye boxes are relative to the face crop; shift them back
            # into full-image coordinates before drawing.
            cv2.rectangle(imgOut, (x0 + ex0, y0 + ey0), (x0 + ex1, y0 + ey1), (0, 255, 0), 1)
    # display image or write to file
    if outFileName is None:
        cv2.imshow('faces', imgOut)
        cv2.waitKey(0)
    else:
        # BUG FIX: imwrite previously ran unconditionally, so after the
        # preview branch it was still called with outFileName=None and
        # crashed; only write when an output file was requested.
        cv2.imwrite(outFileName, imgOut)
|
idf/FaceReader
|
facerec_py/facedet/detector.py
|
Python
|
mit
| 5,760
|
from datetime import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from apps.mountains.models import Mountain, Climb
class ClimbTest(TestCase):
    """Model-level tests for creating and validating Climb records."""

    def setUp(self):
        # Every test fetches these two rows back by their natural keys.
        User.objects.create(
            username="test_user",
            email="test@testemail.com",
            password="testpassword",
        )
        Mountain.objects.create(
            name="Test Mountain",
            elevation=14000,
            difficulty="So Hard",
            lat=2.4584,
            long=-204.4548,
        )

    def test_save_climb_with_correct_info(self):
        """A fully populated Climb saves and can be fetched back."""
        now = datetime.now()
        test_climb = Climb.objects.create(
            climber=User.objects.get(username="test_user"),
            mountain=Mountain.objects.get(name="Test Mountain"),
            start_date=now.strftime('%Y-%m-%d'),
            summit_date=now.strftime('%Y-%m-%d'),
            finish_date=now.strftime('%Y-%m-%d'),
            start_time=now.strftime("%X"),
            summit_time=now.strftime("%X"),
            finish_time=now.strftime("%X"),
            total_distance=5,
            notes="Climbed a mountain",
        )
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(test_climb, Climb.objects.get(mountain="Test Mountain"),
                         "Mountain could not be created.")

    def test_save_with_missing_field(self):
        """Omitting start_date must prevent the Climb from saving."""
        now = datetime.now()
        try:
            test_climb = Climb.objects.create(
                climber=User.objects.get(username="test_user"),
                mountain=Mountain.objects.get(name="Test Mountain"),
                summit_date=now.strftime('%Y-%m-%d'),
                finish_date=now.strftime('%Y-%m-%d'),
                start_time=now.strftime("%X"),
                summit_time=now.strftime("%X"),
                finish_time=now.strftime("%X"),
                total_distance=5,
                notes="Climbed a mountain",
            )
        except Exception:  # was a bare except:, which also hid system exits
            test_climb = None
        self.assertEqual(test_climb, None, "Climb should not save without missing field")

    def test_save_with_invalid_date(self):
        """A non-'%Y-%m-%d' date string must prevent the Climb from saving."""
        now = datetime.now()
        try:
            test_climb = Climb.objects.create(
                climber=User.objects.get(username="test_user"),
                mountain=Mountain.objects.get(name="Test Mountain"),
                summit_date=now.isoformat(),
                finish_date=now.strftime('%Y-%m-%d'),
                start_time=now.strftime("%X"),
                summit_time=now.strftime("%X"),
                finish_time=now.strftime("%X"),
                total_distance=5,
                notes="Climbed a mountain",
            )
        except Exception:  # was a bare except:
            test_climb = None
        self.assertEqual(test_climb, None, "Climb should not save with incorrect date format")
|
tiradoe/fourteeners-updated
|
fourteeners/apps/mountains/tests.py
|
Python
|
mit
| 2,841
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Interpreter version: python 2.7
#
import os.path
import pytest
import abclinuxuapi
from abclinuxuapi import shared
@pytest.fixture
def bp_url():
    """URL of the blogpost that the whole module tests against."""
    url = "http://www.abclinuxu.cz/blog/bystroushaak/2015/2/bolest-proxy"
    return url
@pytest.fixture
def do_that_fucking_monkey_patch(monkeypatch):
    """Replace abclinuxuapi's download() with a canned local HTML page."""
    def mock_download(*args, **kwargs):
        # Serve the stored copy of the blogpost instead of hitting the net.
        fn = os.path.join(os.path.dirname(__file__), "mock_data/blogpost.html")
        with open(fn) as f:
            return f.read()

    monkeypatch.setattr(abclinuxuapi.blogpost, "download", mock_download)
def setup_module(do_that_fucking_monkey_patch):
    """
    It is not possible to import monkeypatch from pytest. You have to use
    it as a fixture.
    """


# Shared Blogpost instance; lazy=False downloads and parses it right here.
# NOTE(review): this runs at module import time, i.e. possibly before the
# monkeypatch fixture is active — verify it does not hit the live site.
BPOST = abclinuxuapi.Blogpost(bp_url(), lazy=False)
@pytest.fixture
def bpost():
    """
    This may seem a little bit crazy, but this speeds up the testing 6x.
    I don't need new object for each test.
    """
    # Every test shares the single pre-parsed module-level instance.
    return BPOST
def test_constructor(bp_url):
    """A lazily constructed Blogpost starts with all fields unset."""
    bp = abclinuxuapi.Blogpost(bp_url)

    assert bp.url == bp_url
    # Nothing was downloaded yet, so every parsed field is at its default.
    assert bp.uid is None
    assert bp.title is None
    assert bp.intro is None
    assert bp.text is None
    assert bp.rating is None
    assert bp.comments == []
    assert bp.comments_n == -1
    assert bp.created_ts is None
    assert bp.last_modified_ts is None
    # object_ts is the local creation timestamp, set unconditionally.
    assert bp.object_ts > 0
def test_constructor_multi_params(bp_url):
    """Every keyword argument lands on the attribute of the same name."""
    passthrough = {
        "uid": "uid",
        "title": "title",
        "intro": "intro",
        "text": "text",
        "rating": "rating",
        "comments": "comments",
        "comments_n": "comments_n",
        "created_ts": "created_ts",
        "last_modified_ts": "last_modified_ts",
        "object_ts": "object_ts",
    }
    bp = abclinuxuapi.Blogpost(url=bp_url, **passthrough)

    assert bp.url == bp_url
    for attr_name, expected in passthrough.items():
        assert getattr(bp, attr_name) == expected
def test_constructor_wrong_params(bp_url):
    """Unknown keyword arguments are rejected with TypeError."""
    with pytest.raises(TypeError):
        bp = abclinuxuapi.Blogpost(bp_url, azgabash=True)
def test_get_title(bpost):
    """Title parsed from the blogpost HTML."""
    assert bpost.title == "Bolest proxy"


def test_get_text(bpost):
    """Body text keeps the heading and inline author markup."""
    assert bpost.text.startswith("<h2>Bolest proxy</h2>")
    assert "Written in CherryTree" in bpost.text
    assert "bystrousak:" in bpost.text


def test_Tag():
    """Tag compares like its string value and carries norm/url extras."""
    tag = abclinuxuapi.Tag("hello", norm="_hello_")
    assert tag == "hello"
    assert tag.norm == "_hello_"
    assert tag.url.startswith("http")
def test_tags(bpost):
    """Tags parse from the page and survive an add/remove round-trip."""
    assert bpost.tags
    assert "proxy" in bpost.tags

    # try to add and remove tag
    new_tag = abclinuxuapi.Tag("nábytek", "nabytek")
    # Ensure the tag is absent first (throw=False tolerates a missing tag).
    bpost.remove_tag(new_tag, throw=False)
    assert new_tag not in bpost.tags

    bpost.add_tag(new_tag)
    assert new_tag in bpost.tags

    # Leave the post the way we found it.
    bpost.remove_tag(new_tag, throw=False)
def test_get_uid(bpost):
    """Numeric post id extracted from the page."""
    assert bpost.uid == 400957


def test_get_rating(bpost):
    """Rating parses into a percentage plus a vote base."""
    assert bpost.rating
    assert bpost.rating.rating == 100
    assert bpost.rating.base == 15


def test_meta_parsing(bpost):
    """Meta block: tux flag, creation timestamp, and monotone counters."""
    assert bpost.has_tux
    assert bpost.created_ts == 1423587660.0
    # These values only grow on the live site, hence the >= comparisons.
    assert bpost.last_modified_ts >= 1423591140.0
    assert bpost.readed >= 1451


def test_get_image_urls(bpost):
    """First embedded screenshot URL from the post body."""
    assert bpost.get_image_urls()
    assert bpost.get_image_urls()[0] == (
        "https://www.abclinuxu.cz/images/screenshots/0/9/"
        "210590-bolest-proxy-6017333664768008869.png"
    )
def test_different_date_parsing():
    """Posts of several vintages must construct without date-parse errors."""
    # These only need to parse; no attribute assertions are made on them.
    plain_urls = [
        "http://abclinuxu.cz/clanky/yubikey.-co-to-je-a-co-to-umi-1",
        "http://abclinuxu.cz/clanky/bezpecnost/ssl-je-vase-bezpecne-pripojeni-opravdu-zabezpecene",
        "http://abclinuxu.cz/blog/jarasa/2016/10/i-pejsek-musi-jist-kvalitne",
        "http://abclinuxu.cz/blog/msk/2016/8/hlada-sa-linux-embedded-vyvojar",
    ]
    for url in plain_urls:
        abclinuxuapi.Blogpost(url, lazy=False)

    blog = abclinuxuapi.Blogpost(
        "http://abclinuxu.cz/blog/Strider_BSD_koutek/2006/8/objevil-jsem-ameriku",
        lazy=False
    )
    assert len(blog.comments) == 0

    blog = abclinuxuapi.Blogpost(
        "http://www.abclinuxu.cz/blog/tucnak_viktor/2005/1/zdravim-nahodne-navstevniky",
        lazy=False
    )
    blog = abclinuxuapi.Blogpost(
        "https://www.abclinuxu.cz/blog/luv/2016/4/mockgeofix-mock-geolokace-kompatibilni-s-android-emulatorem",
        lazy=False
    )
    assert len(blog.comments) == 0
|
Bystroushaak/abclinuxuapi
|
tests/test_blogpost.py
|
Python
|
mit
| 4,767
|
from django.conf.urls import url

from .views import list_products

app_name = "producto"

urlpatterns = [
    # BUG FIX: Django matches url() regexes with re.search, so without
    # the leading '^' this pattern matched *any* path ending in
    # 'list-productos/'. Anchor it at the start.
    url(r'^list-productos/$', list_products, name='list_products'),
]
|
andree1320z/deport-upao-web
|
deport_upao/apps/productos/urls.py
|
Python
|
mit
| 176
|
#
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
import ClientAPI
class AnimationState:
#
# Constructor
#
def __init__(self):
assert False
#
# Property Getters
#
def _get_Time(self):
return self._state.State.Time
def __getattr__(self, attrname):
if attrname in self._getters:
return self._getters[attrname](self)
else:
raise AttributeError, attrname
#
# Property Setters
#
def __setattr__(self, attrname, value):
if attrname in self._setters:
self._setters[attrname](self, value)
else:
raise AttributeError, attrname
_getters = { 'Time' : _get_Time }
_setters = { }
#
# Methods
#
def AddTime(self, t):
return self._state.AddTime(t)
def RegisterTimeEventHandler(self, time, handler):
AnimationStateEventWrapper(self, handler, time)
#
# This class is just another way of making an AnimationState, with a different constructor,
# since we don't have constructor overloading within a single class. This should only
# be used internally by the API.
#
class _ExistingAnimationState(AnimationState):
    """AnimationState wrapping an already-existing engine state object.

    Exists only because a single class cannot have two constructors;
    internal to the API.
    """
    #
    # Constructor
    #
    def __init__(self, state):
        # Write through __dict__ to bypass the restrictive __setattr__.
        self.__dict__['_state'] = state
    def __setattr__(self, attrname, value):
        # Defer to the base class's table-driven setter (which rejects
        # everything not listed in _setters).
        AnimationState.__setattr__(self, attrname, value)
class AnimationStateEventWrapper:
    """Adapts an engine time-event callback to the scripting-level API.

    The engine invokes Handler with its own raw state object; the wrapper
    re-invokes the user handler with the wrapping AnimationState instead.
    """
    def __init__(self, state, handler, triggerTime):
        self.animState = state
        self.realHandler = handler
        # Register with the raw engine state, not the wrapper.
        state._state.RegisterTimeEventHandler(triggerTime, self.Handler)
    def Handler(self, axiomState, triggerTime):
        # axiomState (raw engine state) is ignored in favour of the
        # wrapped AnimationState captured at registration time.
        self.realHandler(self.animState, triggerTime)
|
longde123/MultiversePlatform
|
client/Scripts/AnimationState.py
|
Python
|
mit
| 2,968
|
from classes import wunderpy_wrapper
from classes import grocerylist
from classes import grocerystore
# Smoke-test script: pull the grocery list from Wunderlist, categorize it
# against a store layout, and reorder the list to match aisle order.
wp = wunderpy_wrapper.wunderpy_wrapper('../data/tokens.csv')
obj = wp.get_task_positions_obj(wp.WUNDERLIST_GROCERY)
grocery_store = grocerystore.groceryStore('../data/store_order_zehrs.csv', '../data/ingredient_categories.csv') # use the default zehrs store; good enough
groceries = grocerylist.groceryList(wp.WUNDERLIST_GROCERY, wp)
groceries.get_tasks()
# Categorize a single element first, then the whole list.
groceries.get_category_for_element(groceries.grocery_list[0], grocery_store)
groceries.get_categories(grocery_store)
groceries.reorder_list(wp)
# wp.update_list_order(groceries.wunderlist_order_obj)
# TODO check reloading of a list when you enter the right sheet
# TODO sort by cat order value, not cat id.
print('done')
|
briancousins/RecipeBook
|
tests/test_grocerylist.py
|
Python
|
mit
| 784
|
#!/usr/bin/env python
'''Test framework for pyglet. Reads details of components and capabilities
from a requirements document, runs the appropriate unit tests.
How to Run the Tests
--------------------
::
python tests/test.py top app graphics clock resource # these all run automatically
python tests/test.py font media text
python tests/test.py image
python tests/test.py window
Because the tests are interactive, they can take quite a while to complete. The
'window' section in particular takes a long time. It can be frustrating to get
almost through the tests and then something gets messed up, so we suggest you
run the tests in sections as listed above. If you are curious, the sections are
defined in tests/plan.txt.
Here are the different sections and how long they take.
=========== ===========
Section Time to Run
=========== ===========
top automatic
app automatic
graphics automatic
clock automatic
resource automatic
font 1 minute
media 1 minute
text 1 minute
image 5 minutes
window 10 minutes
=========== ===========
Overview
--------
First, some definitions:
Test case:
A single test, implemented by a Python module in the tests/ directory.
Tests can be interactive (requiring the user to pass or fail them) or
non-interactive (the test passes or fails itself).
Section:
A list of test cases to be run in a specified order. Sections can
also contain other sections to an arbitrary level.
Capability:
A capability is a tag that can be applied to a test-case, which specifies
a particular instance of the test. The tester can select which
capabilities are present on their system; and only test cases matching
those capabilities will be run.
There are platform capabilities "WIN", "OSX" and "X11", which are
automatically selected by default.
The "DEVELOPER" capability is used to mark test cases which test a feature
under active development.
The "GENERIC" capability signifies that the test case is equivalent under
all platforms, and is selected by default.
Other capabilities can be specified and selected as needed. For example,
we may wish to use an "NVIDIA" or "ATI" capability to specialise a
test-case for a particular video card make.
Some tests generate regression images if enabled, so you will only
need to run through the interactive procedure once. During
subsequent runs the image shown on screen will be compared with the
regression images and passed automatically if they match. There are
command line options for enabling this feature.
By default regression images are saved in tests/regression/images/
Running tests
-------------
The test procedure is interactive (this is necessary to facilitate the
many GUI-related tests, which cannot be completely automated). With no
command-line arguments, all test cases in all sections will be run::
python tests/test.py
Before each test, a description of the test will be printed, including
some information on what you should look for, and what interactivity
is provided (including how to stop the test). Press ENTER to begin
the test.
When the test is complete, assuming there were no detectable errors
(for example, failed assertions or an exception), you will be asked
to enter a [P]ass or [F]ail. You should Fail the test if the behaviour
was not as described, and enter a short reason.
Details of each test session are logged for future use.
Command-line options:
`--plan=`
Specify the test plan file (defaults to tests/plan.txt)
`--test-root=`
Specify the top-level directory to look for unit tests in (defaults
to test/)
`--capabilities=`
Specify the capabilities to select, comma separated. By default this
only includes your operating system capability (X11, WIN or OSX) and
GENERIC.
`--log-level=`
Specify the minimum log level to write (defaults to 20: info)
`--log-file=`
Specify log file to write to (defaults to "pyglet.%d.log")
`--regression-capture`
Save regression images to disk. Use this only if the tests have
already been shown to pass.
`--regression-check`
Look for a regression image on disk instead of prompting the user for
passage. If a regression image is found, it is compared with the test
case using the tolerance specified below. Recommended only for
developers.
`--regression-tolerance=`
Specify the tolerance when comparing a regression image. A value of
2, for example, means each sample component must be +/- 2 units
of the regression image. Tolerance of 0 means images must be identical,
tolerance of 256 means images will always match (if correct dimensions).
Defaults to 2.
`--regression-path=`
Specify the directory to store and look for regression images.
Defaults to tests/regression/images/
`--developer`
Selects the DEVELOPER capability.
`--no-interactive=`
Don't write descriptions or prompt for confirmation; just run each
    test in succession.
After the command line options, you can specify a list of sections or test
cases to run.
Examples
--------
python tests/test.py --capabilities=GENERIC,NVIDIA,WIN window
Runs all tests in the window section with the given capabilities.
Test just the FULLSCREEN_TOGGLE test case without prompting for input (useful
for development).
python tests/image/PIL_RGBA_SAVE.py
Run a single test outside of the test harness. Handy for development; it
is equivalent to specifying --no-interactive.
Writing tests
-------------
Add the test case to the appropriate section in the test plan (plan.txt).
Create one unit test script per test case. For example, the test for
window.FULLSCREEN_TOGGLE is located at::
tests/window/FULLSCREEN_TOGGLE.py
The test file must contain:
- A module docstring describing what the test does and what the user should
look for.
- One or more subclasses of unittest.TestCase.
- No other module-level code, except perhaps an if __name__ == '__main__'
condition for running tests stand-alone.
- Optionally, the attribute "__noninteractive = True" to specify that
the test is not interactive; doesn't require user intervention.
During development, test cases should be marked with DEVELOPER. Once finished
add the WIN, OSX and X11 capabilities, or GENERIC if it's platform
independent.
Writing regression tests
------------------------
Your test case should subclass tests.regression.ImageRegressionTestCase
instead of unittest.TestCase. At the point where the buffer (window
image) should be checked/saved, call self.capture_regression_image().
If this method returns True, you can exit straight away (regression
test passed), otherwise continue running interactively (regression image
was captured, wait for user confirmation). You can call
capture_regression_image() several times; only the final image will be
used.
Python 3
--------
The tests have to be processed by 2to3 in order to run them with Python 3.
This can be done with::
2to3 --output-dir=tests3 -W -n tests
And then run the tests in the tests3 directory.
'''
from __future__ import print_function
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
import array
import logging
import os
import optparse
import re
import sys
import time
import unittest
# So we can find tests.regression and ensure local pyglet copy is tested.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from pyglet import compat_platform
import tests.regression
import pyglet.image
regressions_path = os.path.join(os.path.dirname(__file__),
'regression', 'images')
class TestCase(object):
    """A single named test case loaded from the plan file.

    ``name`` is a dotted module path relative to the tests package
    (e.g. ``window.FULLSCREEN_TOGGLE``); ``capabilities`` is the set of
    capability tags that must intersect the user-selected capabilities
    for the case to run.
    """
    def __init__(self, name):
        self.name = name
        self.short_name = name.split('.')[-1]
        self.capabilities = set()
    def get_module_filename(self, root=''):
        """Return the source file path of the module implementing this case."""
        path = os.path.join(*self.name.split('.'))
        return '%s.py' % os.path.join(root, path)
    def get_module(self, root=''):
        """Import and return the test module ``tests.<name>``."""
        name = 'tests.%s' % self.name
        module = __import__(name)
        # __import__ returns the top-level package; walk down to the leaf.
        for c in name.split('.')[1:]:
            module = getattr(module, c)
        return module
    def get_regression_image_filename(self):
        """Return where this case's regression reference image lives."""
        return os.path.join(regressions_path, '%s.png' % self.name)
    def test(self, options):
        """Run this case, honouring capability selection and the
        regression capture/check modes, logging results to options.log.
        """
        options.tests_count += 1
        if not options.capabilities.intersection(self.capabilities):
            options.tests_skipped += 1
            options.log.debug('Capabilities mismatch. Skipping %s', self)
            return
        options.log.info('--- test (%d/%d) %s',
            options.tests_count, options.num_tests, self)
        if options.pretend:
            return
        module = None
        try:
            module = self.get_module(options.test_root)
        except IOError:
            options.log.warning('No test exists for %s', self)
        except Exception:
            options.log.exception('Cannot load test for %s', self)
        if not module:
            return
        # A test module may opt out of interactivity via __noninteractive.
        module_interactive = options.interactive
        if hasattr(module, '__noninteractive') and \
           getattr(module, '__noninteractive'):
            module_interactive = False
        # Pick the result class: check against a stored regression image,
        # capture a new one, or plain interactive pass/fail.
        if options.regression_check and \
           os.path.exists(self.get_regression_image_filename()):
            result = RegressionCheckTestResult(
                self, options.regression_tolerance)
            module_interactive = False
        elif options.regression_capture:
            result = RegressionCaptureTestResult(self)
        else:
            result = StandardTestResult(self)
        print('-' * 78)
        print("Running Test: %s (%d/%d)\n" % (self, options.tests_count, options.num_tests))
        if module.__doc__:
            print(' ' + module.__doc__.replace('\n','\n '))
        if module_interactive:
            # NOTE(review): raw_input is Python 2 (renamed input in Python 3).
            raw_input('Press return to begin test...')
        suite = unittest.TestLoader().loadTestsFromModule(module)
        options.log.info('Begin unit tests for %s', self)
        suite(result)
        for failure in result.failures:
            options.log.error('Failure in %s', self)
            options.log.error(failure[1])
        for error in result.errors:
            options.log.error('Error in %s', self)
            options.log.error(error[1])
        options.log.info('%d tests run', result.testsRun)
        num_failures = len(result.failures)
        num_errors = len(result.errors)
        if num_failures or num_errors:
            print('%d Failures and %d Errors detected.' % (num_failures, num_errors))
        # Only prompt the user when the automated part succeeded.
        if (module_interactive and
            len(result.failures) == 0 and
            len(result.errors) == 0):
            # print(module.__doc__)
            user_result = raw_input('Passed [Yn]: ')
            while user_result and user_result not in 'YyNn':
                print("Unrecognized response '%s'" % user_result)
                user_result = raw_input('Passed [Yn]: ')
            if user_result and user_result in 'Nn':
                print('Enter failure description: ')
                description = raw_input('> ')
                options.log.error('User marked fail for %s', self)
                options.log.error(description)
            else:
                options.log.info('User marked pass for %s', self)
                result.setUserPass()
    def __repr__(self):
        return 'TestCase(%s)' % self.name
    def __str__(self):
        return self.name
    def __cmp__(self, other):
        # NOTE(review): __cmp__ and cmp() are Python 2 only; this file
        # predates Python 3 ordering protocols.
        return cmp(str(self), str(other))
    def num_tests(self):
        # A leaf always counts as exactly one test.
        return 1
class TestSection(object):
    """A named collection of test cases and/or nested sections.

    Children run in insertion order; the section itself carries no test
    logic beyond delegating to its members.
    """

    def __init__(self, name):
        self.name = name
        self.children = []

    def add(self, child):
        """Append a child (a TestCase or a nested TestSection)."""
        self.children.append(child)

    def test(self, options):
        """Run every child in order, passing the shared options through."""
        for member in self.children:
            member.test(options)

    def __repr__(self):
        return 'TestSection(%s)' % (self.name,)

    def num_tests(self):
        """Total number of test cases contained, recursively."""
        return sum(child.num_tests() for child in self.children)
class TestPlan(object):
    """Tree of TestSections/TestCases parsed from an indented plan file."""
    def __init__(self):
        self.root = None
        # Maps both full dotted names and short names to plan nodes.
        self.names = {}
    @classmethod
    def from_file(cls, file):
        """Parse a plan file (path or file-like object) into a TestPlan.

        Indentation defines section nesting. Lines containing a '.' are
        test cases (dotted name followed by capability tags); other lines
        open new sections. Blank and '#' lines are ignored.
        """
        plan = TestPlan()
        plan.root = TestSection('{root}')
        plan.root.indent = None
        # Section stack
        sections = [plan.root]
        if not hasattr(file, 'read'):
            file = open(file, 'r')
        line_number = 0
        for line in file:
            line_number += 1
            # Skip empty lines
            if not line.strip():
                continue
            # Skip comments
            if line[0] == '#':
                continue
            indent = len(line) - len(line.lstrip())
            # Pop sections whose recorded indent is deeper than this line.
            while (sections and sections[-1].indent and
                   sections[-1].indent > indent):
                sections.pop()
            # First child of a section fixes that section's indent level.
            if sections[-1].indent is None:
                sections[-1].indent = indent
            if sections[-1].indent != indent:
                raise Exception('Indentation mismatch line %d' % line_number)
            if '.' in line:
                # Test case: first token is the dotted name, the remaining
                # tokens are capability tags.
                tokens = line.strip().split()
                test_case = TestCase(tokens[0])
                test_case.capabilities = set(tokens[1:])
                sections[-1].add(test_case)
                plan.names[test_case.name] = test_case
                plan.names[test_case.short_name] = test_case
            else:
                section = TestSection(line.strip())
                section.indent = None
                sections[-1].add(section)
                sections.append(section)
                plan.names[section.name] = section
        return plan
    def run(self, options, names=[]):
        """Run the named sections/cases (all if none given).

        Returns False if any name is unknown, True otherwise.
        NOTE(review): the mutable default for ``names`` is only read,
        never mutated, so it is harmless here.
        """
        if not names:
            components = [self.root]
        else:
            components = []
            for name in names:
                if name not in self.names:
                    options.log.error('Unknown test case or section "%s"', name)
                    return False
                else:
                    components.append(self.names[name])
        options.num_tests = sum([c.num_tests() for c in components])
        options.tests_count = 0
        options.tests_skipped = 0
        for component in components:
            component.test(options)
        print('-' * 78)
        return True
class StandardTestResult(unittest.TestResult):
    """Plain unittest result used when no regression handling is active.

    The ``component`` argument is accepted for interface parity with the
    regression result classes but is otherwise unused.
    """

    def __init__(self, component):
        super(StandardTestResult, self).__init__()

    def setUserPass(self):
        """No-op: an interactive pass needs no extra bookkeeping here."""
class RegressionCaptureTestResult(unittest.TestResult):
    """Test result that saves the captured screen image on user pass."""
    def __init__(self, component):
        super(RegressionCaptureTestResult, self).__init__()
        self.component = component
        # Image captured by the test case; persisted in setUserPass().
        self.captured_image = None
    def startTest(self, test):
        super(RegressionCaptureTestResult, self).startTest(test)
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            # Ask the test case to record its buffer image.
            test._enable_regression_image = True
    def addSuccess(self, test):
        super(RegressionCaptureTestResult, self).addSuccess(test)
        # Only one capture is expected per result instance.
        assert self.captured_image is None
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            self.captured_image = test._captured_image
    def setUserPass(self):
        """Persist the captured image as the new regression reference."""
        if self.captured_image:
            filename = self.component.get_regression_image_filename()
            self.captured_image.save(filename)
            logging.getLogger().info('Wrote regression image %s' % filename)
class Regression(Exception):
    """Raised to report a regression-image mismatch as a test failure."""
def buffer_equal(a, b, tolerance=0):
    """Compare two byte buffers for equality within a per-byte tolerance.

    With tolerance 0 this is a plain equality test. Otherwise both
    buffers must have the same length and every corresponding pair of
    bytes must differ by at most ``tolerance``.
    """
    if tolerance == 0:
        return a == b
    if len(a) != len(b):
        return False
    # View both buffers as unsigned bytes so elements compare as ints.
    ua = array.array('B', a)
    ub = array.array('B', b)
    return all(abs(x - y) <= tolerance for x, y in zip(ua, ub))
class RegressionCheckTestResult(unittest.TestResult):
    """Test result that compares the captured image against a stored
    regression image instead of prompting the user.
    """
    def __init__(self, component, tolerance):
        super(RegressionCheckTestResult, self).__init__()
        self.filename = component.get_regression_image_filename()
        self.regression_image = pyglet.image.load(self.filename)
        self.tolerance = tolerance
    def startTest(self, test):
        super(RegressionCheckTestResult, self).startTest(test)
        if isinstance(test, tests.regression.ImageRegressionTestCase):
            test._enable_regression_image = True
            # Run non-interactively; the image comparison decides the result.
            test._enable_interactive = False
            logging.getLogger().info('Using regression %s' % self.filename)
    def addSuccess(self, test):
        # Check image
        ref_image = self.regression_image.image_data
        this_image = test._captured_image.image_data
        # Normalize format/pitch so the raw buffers are directly comparable.
        this_image.format = ref_image.format
        this_image.pitch = ref_image.pitch
        if this_image.width != ref_image.width:
            self.addFailure(test,
                'Buffer width does not match regression image')
        elif this_image.height != ref_image.height:
            self.addFailure(test,
                'Buffer height does not match regression image')
        elif not buffer_equal(this_image.data, ref_image.data,
                              self.tolerance):
            self.addFailure(test,
                'Buffer does not match regression image')
        else:
            super(RegressionCheckTestResult, self).addSuccess(test)
    def addFailure(self, test, err):
        # Wrap the message in a Regression exception; the empty list stands
        # in for the traceback in the (exc_type, exc_value, tb) triple.
        err = Regression(err)
        super(RegressionCheckTestResult, self).addFailure(test, (Regression,
                                                                 err, []))
def main():
    """Entry point: parse options, build the test plan, run selected tests."""
    # GENERIC always applies; add the platform-specific capability tag.
    capabilities = ['GENERIC']
    platform_capabilities = {
        'linux': 'X11',
        'linux2': 'X11',
        'linux3': 'X11',
        'linux-compat': 'X11',
        'win32': 'WIN',
        'cygwin': 'WIN',
        'darwin': 'OSX'
    }
    if compat_platform in platform_capabilities:
        capabilities.append(platform_capabilities[compat_platform])
    script_root = os.path.dirname(__file__)
    plan_filename = os.path.normpath(os.path.join(script_root, 'plan.txt'))
    test_root = script_root
    op = optparse.OptionParser()
    op.usage = 'test.py [options] [components]'
    op.add_option('--plan', help='test plan file', default=plan_filename)
    op.add_option('--test-root', default=script_root,
        help='directory containing test cases')
    op.add_option('--capabilities', help='selected test capabilities',
        default=','.join(capabilities))
    op.add_option('--log-level', help='verbosity of logging',
        default=20, type='int')
    op.add_option('--log-file', help='log to FILE', metavar='FILE',
        default='pyglet.%d.log')
    op.add_option('--regression-path', metavar='DIR', default=regressions_path,
        help='locate regression images in DIR')
    op.add_option('--regression-tolerance', type='int', default=2,
        help='tolerance for comparing regression images')
    op.add_option('--regression-check', action='store_true',
        help='enable image regression checks')
    op.add_option('--regression-capture', action='store_true',
        help='enable image regression capture')
    op.add_option('--no-interactive', action='store_false', default=True,
        dest='interactive', help='disable interactive prompting')
    op.add_option('--developer', action='store_true',
        help='add DEVELOPER capability')
    op.add_option('--pretend', action='store_true',
        help='print selected test cases only')
    options, args = op.parse_args()
    options.capabilities = set(options.capabilities.split(','))
    if options.developer:
        options.capabilities.add('DEVELOPER')
    if options.regression_capture:
        try:
            os.makedirs(regressions_path)
        except OSError:
            # Directory already exists (or cannot be created); proceed.
            pass
    # A '%d' in the log file name is replaced with the first unused index.
    if '%d' in options.log_file:
        i = 1
        while os.path.exists(options.log_file % i):
            i += 1
        options.log_file = options.log_file % i
    print('Test results are saved in log file:', options.log_file)
    logging.basicConfig(filename=options.log_file, level=options.log_level, format='%(levelname)s %(message)s')
    options.log = logging.getLogger()
    options.log.info('Beginning test at %s', time.ctime())
    options.log.info('Capabilities are: %s', ', '.join(options.capabilities))
    options.log.info('sys.platform = %s', sys.platform)
    # NOTE(review): only pyglet.image is imported above; pyglet.version and
    # pyglet.platform are assumed to be set by the package import — confirm.
    options.log.info('pyglet.version = %s', pyglet.version)
    options.log.info('pyglet.platform = %s', pyglet.platform)
    options.log.info('Reading test plan from %s', options.plan)
    plan = TestPlan.from_file(options.plan)
    if not plan.run(options, args):
        options.log.error('Test run failed.')
    print('Test results are saved in log file:', options.log_file)
if __name__ == '__main__':
main()
|
vickenty/ookoobah
|
pyglet-c9188efc2e30/tests/test.py
|
Python
|
mit
| 20,890
|
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""A clone of the Music Player Daemon (MPD) that plays music from a
Beets library. Attempts to implement a compatible protocol to allow
use of the wide range of MPD clients.
"""
from __future__ import division, absolute_import, print_function
import re
from string import Template
import traceback
import random
import time
import beets
from beets.plugins import BeetsPlugin
import beets.ui
from beets import logging
from beets import vfs
from beets.util import bluelet
from beets.library import Item
from beets import dbcore
from beets.mediafile import MediaFile
import six
PROTOCOL_VERSION = '0.13.0'
BUFSIZE = 1024
HELLO = 'OK MPD %s' % PROTOCOL_VERSION
CLIST_BEGIN = 'command_list_begin'
CLIST_VERBOSE_BEGIN = 'command_list_ok_begin'
CLIST_END = 'command_list_end'
RESP_OK = 'OK'
RESP_CLIST_VERBOSE = 'list_OK'
RESP_ERR = 'ACK'
NEWLINE = u"\n"
ERROR_NOT_LIST = 1
ERROR_ARG = 2
ERROR_PASSWORD = 3
ERROR_PERMISSION = 4
ERROR_UNKNOWN = 5
ERROR_NO_EXIST = 50
ERROR_PLAYLIST_MAX = 51
ERROR_SYSTEM = 52
ERROR_PLAYLIST_LOAD = 53
ERROR_UPDATE_ALREADY = 54
ERROR_PLAYER_SYNC = 55
ERROR_EXIST = 56
VOLUME_MIN = 0
VOLUME_MAX = 100
SAFE_COMMANDS = (
# Commands that are available when unauthenticated.
u'close', u'commands', u'notcommands', u'password', u'ping',
)
ITEM_KEYS_WRITABLE = set(MediaFile.fields()).intersection(Item._fields.keys())
# Loggers.
log = logging.getLogger('beets.bpd')
global_log = logging.getLogger('beets')
# Gstreamer import error.
class NoGstreamerError(Exception):
    """Raised when the GStreamer playback backend cannot be imported."""
    pass
# Error-handling, exceptions, parameter parsing.
class BPDError(Exception):
    """An error that should be exposed to the client of the BPD server."""

    # Protocol error-line format shared by all instances.
    template = Template(u'$resp [$code@$index] {$cmd_name} $message')

    def __init__(self, code, message, cmd_name='', index=0):
        self.code = code
        self.message = message
        self.cmd_name = cmd_name
        self.index = index

    def response(self):
        """Return the protocol response line ("ACK ...") for this error."""
        fields = {
            'resp': RESP_ERR,
            'code': self.code,
            'index': self.index,
            'cmd_name': self.cmd_name,
            'message': self.message,
        }
        return self.template.substitute(fields)
def make_bpd_error(s_code, s_message):
    """Create a BPDError subclass for a static code and message.

    The returned class takes no constructor arguments; code, message,
    cmd_name and index are fixed as class attributes.
    """
    class NewBPDError(BPDError):
        code = s_code
        message = s_message
        cmd_name = ''
        index = 0
        def __init__(self):
            # Class attributes above supply all the state; skip the
            # BPDError constructor entirely.
            pass
    return NewBPDError
ArgumentTypeError = make_bpd_error(ERROR_ARG, u'invalid type for argument')
ArgumentIndexError = make_bpd_error(ERROR_ARG, u'argument out of range')
ArgumentNotFoundError = make_bpd_error(ERROR_NO_EXIST, u'argument not found')
def cast_arg(t, val):
    """Coerce *val* with the callable *t*, translating ValueError into
    ArgumentTypeError.

    If *t* is the special string 'intbool', the value is cast first to
    an int and then to a bool (i.e., '1' -> True, '0' -> False).
    """
    if t == 'intbool':
        # Two-step cast: text -> int -> bool.
        return cast_arg(bool, cast_arg(int, val))
    try:
        return t(val)
    except ValueError:
        raise ArgumentTypeError()
class BPDClose(Exception):
    """Raised by a command invocation to signal that the client
    connection should be closed (used as control flow, not an error).
    """
# Generic server infrastructure, implementing the basic protocol.
class BaseServer(object):
"""A MPD-compatible music player server.
The functions with the `cmd_` prefix are invoked in response to
client commands. For instance, if the client says `status`,
`cmd_status` will be invoked. The arguments to the client's commands
are used as function arguments following the connection issuing the
command. The functions may send data on the connection. They may
also raise BPDError exceptions to report errors.
This is a generic superclass and doesn't support many commands.
"""
    def __init__(self, host, port, password):
        """Create a new server bound to address `host` and listening
        on port `port`. If `password` is given, it is required to do
        anything significant on the server.
        """
        self.host, self.port, self.password = host, port, password
        # Default server values.
        self.random = False
        self.repeat = False
        self.volume = VOLUME_MAX
        self.crossfade = 0
        self.playlist = []
        self.playlist_version = 0
        self.current_index = -1  # -1 means stopped.
        self.paused = False
        self.error = None
        # Object for random numbers generation
        self.random_obj = random.Random()
    def run(self):
        """Block and start listening for connections from clients. An
        interrupt (^C) closes the server.
        """
        self.startup_time = time.time()
        bluelet.run(bluelet.server(self.host, self.port,
                                   Connection.handler(self)))
    def _item_info(self, item):
        """An abstract method that should return response lines containing
        a single song's metadata.
        """
        raise NotImplementedError
    def _item_id(self, item):
        """An abstract method returning the integer id for an item.
        """
        raise NotImplementedError
    def _id_to_index(self, track_id):
        """Searches the playlist for a song with the given id and
        returns its index in the playlist.

        Raises ArgumentNotFoundError if no track has that id.
        """
        track_id = cast_arg(int, track_id)
        for index, track in enumerate(self.playlist):
            if self._item_id(track) == track_id:
                return index
        # Loop finished with no track found.
        raise ArgumentNotFoundError()
    def _random_idx(self):
        """Returns a random index different from the current one.
        If there are no songs in the playlist it returns -1.
        If there is only one song in the playlist it returns 0.
        """
        if len(self.playlist) < 2:
            # 0 or 1 song: len - 1 yields -1 or 0 respectively.
            return len(self.playlist) - 1
        new_index = self.random_obj.randint(0, len(self.playlist) - 1)
        # Re-roll until we land somewhere other than the current song.
        while new_index == self.current_index:
            new_index = self.random_obj.randint(0, len(self.playlist) - 1)
        return new_index
    def _succ_idx(self):
        """Returns the index for the next song to play.
        It also considers random and repeat flags.
        No boundaries are checked.
        """
        if self.repeat:
            return self.current_index
        if self.random:
            return self._random_idx()
        return self.current_index + 1
    def _prev_idx(self):
        """Returns the index for the previous song to play.
        It also considers random and repeat flags.
        No boundaries are checked.
        """
        if self.repeat:
            return self.current_index
        if self.random:
            return self._random_idx()
        return self.current_index - 1
    def cmd_ping(self, conn):
        """Succeeds."""
        pass
    def cmd_kill(self, conn):
        """Exits the server process."""
        exit(0)
    def cmd_close(self, conn):
        """Closes the connection."""
        raise BPDClose()
    def cmd_password(self, conn, password):
        """Attempts password authentication.

        A failed attempt also revokes any previous authentication.
        """
        if password == self.password:
            conn.authenticated = True
        else:
            conn.authenticated = False
            raise BPDError(ERROR_PASSWORD, u'incorrect password')
    def cmd_commands(self, conn):
        """Lists the commands available to the user."""
        if self.password and not conn.authenticated:
            # Not authenticated. Show limited list of commands.
            for cmd in SAFE_COMMANDS:
                yield u'command: ' + cmd
        else:
            # Authenticated. Show all commands.
            for func in dir(self):
                if func.startswith('cmd_'):
                    yield u'command: ' + func[4:]
    def cmd_notcommands(self, conn):
        """Lists all unavailable commands."""
        if self.password and not conn.authenticated:
            # Not authenticated. Show privileged commands.
            for func in dir(self):
                if func.startswith('cmd_'):
                    cmd = func[4:]
                    if cmd not in SAFE_COMMANDS:
                        yield u'command: ' + cmd
        else:
            # Authenticated. No commands are unavailable.
            pass
    def cmd_status(self, conn):
        """Returns some status information for use with an
        implementation of cmd_status.
        Gives a list of response-lines for: volume, repeat, random,
        playlist, playlistlength, and xfade.
        """
        yield (
            u'volume: ' + six.text_type(self.volume),
            u'repeat: ' + six.text_type(int(self.repeat)),
            u'random: ' + six.text_type(int(self.random)),
            u'playlist: ' + six.text_type(self.playlist_version),
            u'playlistlength: ' + six.text_type(len(self.playlist)),
            u'xfade: ' + six.text_type(self.crossfade),
        )
        # Playback state: stopped (no current song), paused, or playing.
        if self.current_index == -1:
            state = u'stop'
        elif self.paused:
            state = u'pause'
        else:
            state = u'play'
        yield u'state: ' + state
        if self.current_index != -1: # i.e., paused or playing
            current_id = self._item_id(self.playlist[self.current_index])
            yield u'song: ' + six.text_type(self.current_index)
            yield u'songid: ' + six.text_type(current_id)
        if self.error:
            yield u'error: ' + self.error
    def cmd_clearerror(self, conn):
        """Removes the persistent error state of the server. This
        error is set when a problem arises not in response to a
        command (for instance, when playing a file).
        """
        self.error = None
    def cmd_random(self, conn, state):
        """Set or unset random (shuffle) mode."""
        self.random = cast_arg('intbool', state)
    def cmd_repeat(self, conn, state):
        """Set or unset repeat mode."""
        self.repeat = cast_arg('intbool', state)
    def cmd_setvol(self, conn, vol):
        """Set the player's volume level (0-100).

        Raises BPDError(ERROR_ARG) for out-of-range values.
        """
        vol = cast_arg(int, vol)
        if vol < VOLUME_MIN or vol > VOLUME_MAX:
            raise BPDError(ERROR_ARG, u'volume out of range')
        self.volume = vol
def cmd_crossfade(self, conn, crossfade):
"""Set the number of seconds of crossfading."""
crossfade = cast_arg(int, crossfade)
if crossfade < 0:
raise BPDError(ERROR_ARG, u'crossfade time must be nonnegative')
    def cmd_clear(self, conn):
        """Clear the playlist."""
        self.playlist = []
        self.playlist_version += 1
        self.cmd_stop(conn)
    def cmd_delete(self, conn, index):
        """Remove the song at index from the playlist.

        Raises ArgumentIndexError for an out-of-range index.
        """
        index = cast_arg(int, index)
        try:
            del(self.playlist[index])
        except IndexError:
            raise ArgumentIndexError()
        self.playlist_version += 1
        if self.current_index == index: # Deleted playing song.
            self.cmd_stop(conn)
        elif index < self.current_index: # Deleted before playing.
            # Shift playing index down.
            self.current_index -= 1
    def cmd_deleteid(self, conn, track_id):
        # Same as cmd_delete, but the track is selected by id.
        self.cmd_delete(conn, self._id_to_index(track_id))
    def cmd_move(self, conn, idx_from, idx_to):
        """Move a track in the playlist."""
        idx_from = cast_arg(int, idx_from)
        idx_to = cast_arg(int, idx_to)
        try:
            track = self.playlist.pop(idx_from)
            self.playlist.insert(idx_to, track)
        except IndexError:
            raise ArgumentIndexError()
        # Update currently-playing song.
        if idx_from == self.current_index:
            self.current_index = idx_to
        elif idx_from < self.current_index <= idx_to:
            # Track moved from before the current song to at/after it.
            self.current_index -= 1
        elif idx_from > self.current_index >= idx_to:
            # Track moved from after the current song to at/before it.
            self.current_index += 1
        self.playlist_version += 1
    def cmd_moveid(self, conn, idx_from, idx_to):
        # Same as cmd_move, but the source track is selected by id.
        idx_from = self._id_to_index(idx_from)
        return self.cmd_move(conn, idx_from, idx_to)
    def cmd_swap(self, conn, i, j):
        """Swaps two tracks in the playlist."""
        i = cast_arg(int, i)
        j = cast_arg(int, j)
        try:
            track_i = self.playlist[i]
            track_j = self.playlist[j]
        except IndexError:
            raise ArgumentIndexError()
        self.playlist[j] = track_i
        self.playlist[i] = track_j
        # Update currently-playing song.
        if self.current_index == i:
            self.current_index = j
        elif self.current_index == j:
            self.current_index = i
        self.playlist_version += 1
    def cmd_swapid(self, conn, i_id, j_id):
        # Same as cmd_swap, but both tracks are selected by id.
        i = self._id_to_index(i_id)
        j = self._id_to_index(j_id)
        return self.cmd_swap(conn, i, j)
def cmd_urlhandlers(self, conn):
"""Indicates supported URL schemes. None by default."""
pass
def cmd_playlistinfo(self, conn, index=-1):
"""Gives metadata information about the entire playlist or a
single track, given by its index.
"""
index = cast_arg(int, index)
if index == -1:
for track in self.playlist:
yield self._item_info(track)
else:
try:
track = self.playlist[index]
except IndexError:
raise ArgumentIndexError()
yield self._item_info(track)
def cmd_playlistid(self, conn, track_id=-1):
return self.cmd_playlistinfo(conn, self._id_to_index(track_id))
def cmd_plchanges(self, conn, version):
"""Sends playlist changes since the given version.
This is a "fake" implementation that ignores the version and
just returns the entire playlist (rather like version=0). This
seems to satisfy many clients.
"""
return self.cmd_playlistinfo(conn)
def cmd_plchangesposid(self, conn, version):
    """Like plchanges, but only sends position and id.

    Also a dummy implementation.
    """
    # As with plchanges, `version` is ignored and the full playlist
    # is reported.
    for position, entry in enumerate(self.playlist):
        yield u'cpos: ' + six.text_type(position)
        yield u'Id: ' + six.text_type(entry.id)
def cmd_currentsong(self, conn):
    """Sends information about the currently-playing song.
    """
    # current_index == -1 is the "stopped" sentinel; yield nothing.
    if self.current_index == -1:
        return
    yield self._item_info(self.playlist[self.current_index])
def cmd_next(self, conn):
    """Advance to the next song in the playlist."""
    self.current_index = self._succ_idx()
    if self.current_index < len(self.playlist):
        return self.cmd_play(conn)
    # Ran off the end of the playlist: move to the stopped state.
    return self.cmd_stop(conn)
def cmd_previous(self, conn):
    """Step back to the last song."""
    self.current_index = self._prev_idx()
    if self.current_index >= 0:
        return self.cmd_play(conn)
    # Walked off the front of the playlist: stop playback.
    return self.cmd_stop(conn)
def cmd_pause(self, conn, state=None):
    """Set the pause state playback."""
    if state is not None:
        # An explicit 0/1 argument was supplied.
        self.paused = cast_arg('intbool', state)
    else:
        # No argument: flip the current pause state.
        self.paused = not self.paused
def cmd_play(self, conn, index=-1):
    """Begin playback, possibly at a specified playlist index."""
    index = cast_arg(int, index)
    if index < -1 or index > len(self.playlist):
        raise ArgumentIndexError()

    if index != -1:
        # An explicit starting position was given.
        self.current_index = index
    elif not self.playlist:
        # No index and nothing queued: stop immediately.
        return self.cmd_stop(conn)
    elif self.current_index == -1:
        # No current song yet: start from the beginning.
        self.current_index = 0
    # Otherwise resume wherever we already are.
    self.paused = False
def cmd_playid(self, conn, track_id=0):
    """Begin playback of the track with the given id (or resume,
    MPD-style, when the id is -1).
    """
    track_id = cast_arg(int, track_id)
    index = -1 if track_id == -1 else self._id_to_index(track_id)
    return self.cmd_play(conn, index)
def cmd_stop(self, conn):
    """Halt playback and clear the current-song pointer."""
    self.paused = False
    # -1 is the sentinel index meaning "stopped".
    self.current_index = -1
def cmd_seek(self, conn, index, pos):
    """Seek to a specified point in a specified song."""
    index = cast_arg(int, index)
    if not 0 <= index < len(self.playlist):
        raise ArgumentIndexError()
    # Note: only the track changes; `pos` is handled by subclasses.
    self.current_index = index
def cmd_seekid(self, conn, track_id, pos):
    """Like ``seek``, but selects the track by its id."""
    return self.cmd_seek(conn, self._id_to_index(track_id), pos)
def cmd_profile(self, conn):
    """Memory profiling for debugging."""
    # Requires the third-party `guppy` package; imported lazily so
    # normal server operation does not depend on it.
    from guppy import hpy
    heap = hpy().heap()
    print(heap)
class Connection(object):
    """A connection between a client and the server. Handles input and
    output from and to the client.
    """
    def __init__(self, server, sock):
        """Create a new connection for the accepted socket `sock` on
        behalf of `server`.
        """
        self.server = server
        self.sock = sock
        # Set once the client supplies the password; gates access to
        # commands outside SAFE_COMMANDS when a password is configured.
        self.authenticated = False

    def send(self, lines):
        """Send lines, which is either a single string or an
        iterable consisting of strings, to the client. A newline is
        added after every string. Returns a Bluelet event that sends
        the data.
        """
        if isinstance(lines, six.string_types):
            lines = [lines]
        out = NEWLINE.join(lines) + NEWLINE
        log.debug('{}', out[:-1])  # Don't log trailing newline.
        if isinstance(out, six.text_type):
            # Sockets deal in bytes; encode text before sending.
            out = out.encode('utf8')
        return self.sock.sendall(out)

    def do_command(self, command):
        """A coroutine that runs the given command and sends an
        appropriate response."""
        try:
            yield bluelet.call(command.run(self))
        except BPDError as e:
            # Send the error.
            yield self.send(e.response())
        else:
            # Send success code.
            yield self.send(RESP_OK)

    def run(self):
        """Send a greeting to the client and begin processing commands
        as they arrive.
        """
        yield self.send(HELLO)

        clist = None  # Initially, no command list is being constructed.
        while True:
            line = yield self.sock.readline()
            if not line:
                break
            line = line.strip()
            if not line:
                break
            log.debug('{}', line)

            if clist is not None:
                # Command list already opened.
                if line == CLIST_END:
                    # End of list: execute the batch as one unit.
                    yield bluelet.call(self.do_command(clist))
                    clist = None  # Clear the command list.
                else:
                    clist.append(Command(line))
            elif line == CLIST_BEGIN or line == CLIST_VERBOSE_BEGIN:
                # Begin a command list.
                clist = CommandList([], line == CLIST_VERBOSE_BEGIN)
            else:
                # Ordinary command.
                try:
                    yield bluelet.call(self.do_command(Command(line)))
                except BPDClose:
                    # Command indicates that the conn should close.
                    self.sock.close()
                    return

    @classmethod
    def handler(cls, server):
        # Factory producing the per-socket callback the Bluelet server
        # invokes for each accepted connection.
        def _handle(sock):
            """Creates a new `Connection` and runs it.
            """
            return cls(server, sock).run()
        return _handle
class Command(object):
    """A command issued by the client for processing by the server.
    """
    # Command name: the leading run of non-whitespace bytes.
    command_re = re.compile(br'^([^ \t]+)[ \t]*')
    # Arguments: either a double-quoted string (with \" and \\ escapes)
    # or a bare run of non-whitespace bytes.
    arg_re = re.compile(br'"((?:\\"|[^"])+)"|([^ \t"]+)')

    def __init__(self, s):
        """Creates a new `Command` from the given string, `s`, parsing
        the string for command name and arguments.
        """
        command_match = self.command_re.match(s)
        self.name = command_match.group(1)

        self.args = []
        arg_matches = self.arg_re.findall(s[command_match.end():])
        for match in arg_matches:
            if match[0]:
                # Quoted argument: undo the protocol's escaping.
                arg = match[0]
                arg = arg.replace(b'\\"', b'"').replace(b'\\\\', b'\\')
            else:
                # Unquoted argument.
                arg = match[1]
            arg = arg.decode('utf8')
            self.args.append(arg)

    def run(self, conn):
        """A coroutine that executes the command on the given
        connection.
        """
        # Attempt to get correct command function.
        # NOTE(review): self.name is bytes here (parsed from the socket);
        # under Python 3 concatenating with the str prefix would fail --
        # confirm how the surrounding server decodes command lines.
        func_name = 'cmd_' + self.name
        if not hasattr(conn.server, func_name):
            raise BPDError(ERROR_UNKNOWN, u'unknown command', self.name)
        func = getattr(conn.server, func_name)

        # Ensure we have permission for this command.
        if conn.server.password and \
                not conn.authenticated and \
                self.name not in SAFE_COMMANDS:
            raise BPDError(ERROR_PERMISSION, u'insufficient privileges')

        try:
            args = [conn] + self.args
            results = func(*args)
            if results:
                for data in results:
                    yield conn.send(data)
        except BPDError as e:
            # An exposed error. Set the command name and then let
            # the Connection handle it.
            e.cmd_name = self.name
            raise e
        except BPDClose:
            # An indication that the connection should close. Send
            # it on the Connection.
            raise
        except Exception:
            # An "unintentional" error. Hide it from the client.
            # Bug fix: format_exc() takes a `limit`, not an exception;
            # passing `e` broke traceback formatting (TypeError on
            # Python 3). The current exception is formatted implicitly.
            log.error('{}', traceback.format_exc())
            raise BPDError(ERROR_SYSTEM, u'server error', self.name)
class CommandList(list):
    """A list of commands issued by the client for processing by the
    server. May be verbose, in which case the response is delimited, or
    not. Should be a list of `Command` objects.
    """
    def __init__(self, sequence=None, verbose=False):
        """Create a new `CommandList` from the given sequence of
        `Command`s. If `verbose`, this is a verbose command list.
        """
        super(CommandList, self).__init__(sequence or [])
        self.verbose = verbose

    def run(self, conn):
        """Coroutine executing all the commands in this list.
        """
        for position, command in enumerate(self):
            try:
                yield bluelet.call(command.run(conn))
            except BPDError as exc:
                # Record which command in the list failed, then stop
                # executing the remainder.
                exc.index = position
                raise exc
            # In a verbose ("OK") command list, delimit each command's
            # output.
            if self.verbose:
                yield conn.send(RESP_CLIST_VERBOSE)
# A subclass of the basic, protocol-handling server that actually plays
# music.


class Server(BaseServer):
    """An MPD-compatible server using GStreamer to play audio and beets
    to store its library.
    """
    def __init__(self, library, host, port, password):
        try:
            from beetsplug.bpd import gstplayer
        except ImportError as e:
            # This is a little hacky, but it's the best I know for now.
            if e.args[0].endswith(' gst'):
                raise NoGstreamerError()
            else:
                raise
        super(Server, self).__init__(host, port, password)
        self.lib = library
        self.player = gstplayer.GstPlayer(self.play_finished)
        self.cmd_update(None)

    def run(self):
        self.player.run()
        super(Server, self).run()

    def play_finished(self):
        """A callback invoked every time our player finishes a
        track.
        """
        self.cmd_next(None)

    # Metadata helper functions.

    def _item_info(self, item):
        """Return a list of unicode lines describing `item` in the MPD
        metadata format.
        """
        info_lines = [
            u'file: ' + item.destination(fragment=True),
            u'Time: ' + six.text_type(int(item.length)),
            u'Title: ' + item.title,
            u'Artist: ' + item.artist,
            u'Album: ' + item.album,
            u'Genre: ' + item.genre,
        ]

        track = six.text_type(item.track)
        if item.tracktotal:
            track += u'/' + six.text_type(item.tracktotal)
        info_lines.append(u'Track: ' + track)

        info_lines.append(u'Date: ' + six.text_type(item.year))

        try:
            pos = self._id_to_index(item.id)
            info_lines.append(u'Pos: ' + six.text_type(pos))
        except ArgumentNotFoundError:
            # Don't include position if not in playlist.
            pass

        info_lines.append(u'Id: ' + six.text_type(item.id))

        return info_lines

    def _item_id(self, item):
        return item.id

    # Database updating.

    def cmd_update(self, conn, path=u'/'):
        """Updates the catalog to reflect the current database state.
        """
        # Path is ignored. Also, the real MPD does this asynchronously;
        # this is done inline.
        print(u'Building directory tree...')
        self.tree = vfs.libtree(self.lib)
        print(u'... done.')
        self.updated_time = time.time()

    # Path (directory tree) browsing.

    def _resolve_path(self, path):
        """Returns a VFS node or an item ID located at the path given.
        If the path does not exist, raises ArgumentNotFoundError.
        """
        components = path.split(u'/')
        node = self.tree

        for component in components:
            if not component:
                continue
            if isinstance(node, int):
                # We're trying to descend into a file node.
                raise ArgumentNotFoundError()
            if component in node.files:
                node = node.files[component]
            elif component in node.dirs:
                node = node.dirs[component]
            else:
                raise ArgumentNotFoundError()

        return node

    def _path_join(self, p1, p2):
        """Smashes together two BPD paths."""
        out = p1 + u'/' + p2
        return out.replace(u'//', u'/').replace(u'//', u'/')

    def cmd_lsinfo(self, conn, path=u"/"):
        """Sends info on all the items in the path."""
        node = self._resolve_path(path)
        if isinstance(node, int):
            # Trying to list a track.
            raise BPDError(ERROR_ARG, u'this is not a directory')
        else:
            for name, itemid in iter(sorted(node.files.items())):
                item = self.lib.get_item(itemid)
                yield self._item_info(item)
            for name, _ in iter(sorted(node.dirs.items())):
                dirpath = self._path_join(path, name)
                if dirpath.startswith(u"/"):
                    # Strip leading slash (libmpc rejects this).
                    dirpath = dirpath[1:]
                yield u'directory: %s' % dirpath

    def _listall(self, basepath, node, info=False):
        """Helper function for recursive listing. If info, show
        tracks' complete info; otherwise, just show items' paths.
        """
        if isinstance(node, int):
            # List a single file.
            if info:
                item = self.lib.get_item(node)
                yield self._item_info(item)
            else:
                yield u'file: ' + basepath
        else:
            # List a directory. Recurse into both directories and files.
            for name, itemid in sorted(node.files.items()):
                newpath = self._path_join(basepath, name)
                # "yield from"
                for v in self._listall(newpath, itemid, info):
                    yield v
            for name, subdir in sorted(node.dirs.items()):
                newpath = self._path_join(basepath, name)
                yield u'directory: ' + newpath
                for v in self._listall(newpath, subdir, info):
                    yield v

    def cmd_listall(self, conn, path=u"/"):
        """Send the paths all items in the directory, recursively."""
        return self._listall(path, self._resolve_path(path), False)

    def cmd_listallinfo(self, conn, path=u"/"):
        """Send info on all the items in the directory, recursively."""
        return self._listall(path, self._resolve_path(path), True)

    # Playlist manipulation.

    def _all_items(self, node):
        """Generator yielding all items under a VFS node.
        """
        if isinstance(node, int):
            # Could be more efficient if we built up all the IDs and
            # then issued a single SELECT.
            yield self.lib.get_item(node)
        else:
            # Recurse into a directory.
            for name, itemid in sorted(node.files.items()):
                # "yield from"
                for v in self._all_items(itemid):
                    yield v
            for name, subdir in sorted(node.dirs.items()):
                for v in self._all_items(subdir):
                    yield v

    def _add(self, path, send_id=False):
        """Adds a track or directory to the playlist, specified by the
        path. If `send_id`, write each item's id to the client.
        """
        for item in self._all_items(self._resolve_path(path)):
            self.playlist.append(item)
            if send_id:
                yield u'Id: ' + six.text_type(item.id)
        self.playlist_version += 1

    def cmd_add(self, conn, path):
        """Adds a track or directory to the playlist, specified by a
        path.
        """
        return self._add(path, False)

    def cmd_addid(self, conn, path):
        """Same as `cmd_add` but sends an id back to the client."""
        return self._add(path, True)

    # Server info.

    def cmd_status(self, conn):
        for line in super(Server, self).cmd_status(conn):
            yield line
        if self.current_index > -1:
            item = self.playlist[self.current_index]

            # Bug fix: use floor division. MPD reports bitrate in whole
            # kbps; under Python 3, true division yielded a float here.
            yield u'bitrate: ' + six.text_type(item.bitrate // 1000)
            # Missing 'audio'.

            (pos, total) = self.player.time()
            yield u'time: ' + six.text_type(pos) + u':' + \
                six.text_type(total)

        # Also missing 'updating_db'.

    def cmd_stats(self, conn):
        """Sends some statistics about the library."""
        with self.lib.transaction() as tx:
            statement = 'SELECT COUNT(DISTINCT artist), ' \
                        'COUNT(DISTINCT album), ' \
                        'COUNT(id), ' \
                        'SUM(length) ' \
                        'FROM items'
            artists, albums, songs, totaltime = tx.query(statement)[0]

        yield (
            u'artists: ' + six.text_type(artists),
            u'albums: ' + six.text_type(albums),
            u'songs: ' + six.text_type(songs),
            u'uptime: ' + six.text_type(int(time.time() - self.startup_time)),
            u'playtime: ' + u'0',  # Missing.
            u'db_playtime: ' + six.text_type(int(totaltime)),
            u'db_update: ' + six.text_type(int(self.updated_time)),
        )

    # Searching.

    # Maps MPD tag names to beets item fields.
    tagtype_map = {
        u'Artist': u'artist',
        u'Album': u'album',
        u'Title': u'title',
        u'Track': u'track',
        u'AlbumArtist': u'albumartist',
        u'AlbumArtistSort': u'albumartist_sort',
        # Name?
        u'Genre': u'genre',
        u'Date': u'year',
        u'Composer': u'composer',
        # Performer?
        u'Disc': u'disc',
        u'filename': u'path',  # Suspect.
    }

    def cmd_tagtypes(self, conn):
        """Returns a list of the metadata (tag) fields available for
        searching.
        """
        for tag in self.tagtype_map:
            yield u'tagtype: ' + tag

    def _tagtype_lookup(self, tag):
        """Uses `tagtype_map` to look up the beets column name for an
        MPD tagtype (or throw an appropriate exception). Returns both
        the canonical name of the MPD tagtype and the beets column
        name.
        """
        for test_tag, key in self.tagtype_map.items():
            # Match case-insensitively.
            if test_tag.lower() == tag.lower():
                return test_tag, key
        raise BPDError(ERROR_UNKNOWN, u'no such tagtype')

    def _metadata_query(self, query_type, any_query_type, kv):
        """Helper function returns a query object that will find items
        according to the library query type provided and the key-value
        pairs specified. The any_query_type is used for queries of
        type "any"; if None, then an error is thrown.
        """
        if kv:  # At least one key-value pair.
            queries = []
            # Iterate pairwise over the arguments.
            it = iter(kv)
            for tag, value in zip(it, it):
                if tag.lower() == u'any':
                    if any_query_type:
                        queries.append(any_query_type(value,
                                                      ITEM_KEYS_WRITABLE,
                                                      query_type))
                    else:
                        raise BPDError(ERROR_UNKNOWN, u'no such tagtype')
                else:
                    _, key = self._tagtype_lookup(tag)
                    queries.append(query_type(key, value))
            return dbcore.query.AndQuery(queries)
        else:  # No key-value pairs.
            return dbcore.query.TrueQuery()

    def cmd_search(self, conn, *kv):
        """Perform a substring match for items."""
        query = self._metadata_query(dbcore.query.SubstringQuery,
                                     dbcore.query.AnyFieldQuery,
                                     kv)
        for item in self.lib.items(query):
            yield self._item_info(item)

    def cmd_find(self, conn, *kv):
        """Perform an exact match for items."""
        query = self._metadata_query(dbcore.query.MatchQuery,
                                     None,
                                     kv)
        for item in self.lib.items(query):
            yield self._item_info(item)

    def cmd_list(self, conn, show_tag, *kv):
        """List distinct metadata values for show_tag, possibly
        filtered by matching match_tag to match_term.
        """
        show_tag_canon, show_key = self._tagtype_lookup(show_tag)
        query = self._metadata_query(dbcore.query.MatchQuery, None, kv)

        clause, subvals = query.clause()
        statement = 'SELECT DISTINCT ' + show_key + \
                    ' FROM items WHERE ' + clause + \
                    ' ORDER BY ' + show_key
        with self.lib.transaction() as tx:
            rows = tx.query(statement, subvals)

        for row in rows:
            yield show_tag_canon + u': ' + six.text_type(row[0])

    def cmd_count(self, conn, tag, value):
        """Returns the number and total time of songs matching the
        tag/value query.
        """
        _, key = self._tagtype_lookup(tag)
        songs = 0
        playtime = 0.0
        for item in self.lib.items(dbcore.query.MatchQuery(key, value)):
            songs += 1
            playtime += item.length
        yield u'songs: ' + six.text_type(songs)
        yield u'playtime: ' + six.text_type(int(playtime))

    # "Outputs." Just a dummy implementation because we don't control
    # any outputs.

    def cmd_outputs(self, conn):
        """List the available outputs."""
        yield (
            u'outputid: 0',
            u'outputname: gstreamer',
            u'outputenabled: 1',
        )

    def cmd_enableoutput(self, conn, output_id):
        output_id = cast_arg(int, output_id)
        if output_id != 0:
            raise ArgumentIndexError()

    def cmd_disableoutput(self, conn, output_id):
        output_id = cast_arg(int, output_id)
        if output_id == 0:
            raise BPDError(ERROR_ARG, u'cannot disable this output')
        else:
            raise ArgumentIndexError()

    # Playback control. The functions below hook into the
    # half-implementations provided by the base class. Together, they're
    # enough to implement all normal playback functionality.

    def cmd_play(self, conn, index=-1):
        # NOTE(review): `index` is still the raw (string) argument here;
        # the comparison with -1 therefore only detects the *default*
        # call, not a client-sent "-1". Confirm this is intended.
        new_index = index != -1 and index != self.current_index
        was_paused = self.paused
        super(Server, self).cmd_play(conn, index)

        if self.current_index > -1:  # Not stopped.
            if was_paused and not new_index:
                # Just unpause.
                self.player.play()
            else:
                self.player.play_file(self.playlist[self.current_index].path)

    def cmd_pause(self, conn, state=None):
        super(Server, self).cmd_pause(conn, state)
        if self.paused:
            self.player.pause()
        elif self.player.playing:
            self.player.play()

    def cmd_stop(self, conn):
        super(Server, self).cmd_stop(conn)
        self.player.stop()

    def cmd_seek(self, conn, index, pos):
        """Seeks to the specified position in the specified song."""
        index = cast_arg(int, index)
        pos = cast_arg(int, pos)
        super(Server, self).cmd_seek(conn, index, pos)
        self.player.seek(pos)

    # Volume control.

    def cmd_setvol(self, conn, vol):
        vol = cast_arg(int, vol)
        super(Server, self).cmd_setvol(conn, vol)
        # The player takes volume as a 0.0-1.0 fraction.
        self.player.volume = float(vol) / 100
# Beets plugin hooks.


class BPDPlugin(BeetsPlugin):
    """Provides the "beet bpd" command for running a music player
    server.
    """
    def __init__(self):
        super(BPDPlugin, self).__init__()
        self.config.add({
            'host': u'',
            'port': 6600,
            'password': u'',
            'volume': VOLUME_MAX,
        })
        # Never show the password in dumped/printed configuration.
        self.config['password'].redact = True

    def start_bpd(self, lib, host, port, password, volume, debug):
        """Starts a BPD server."""
        if debug:  # FIXME this should be managed by BeetsPlugin
            self._log.setLevel(logging.DEBUG)
        else:
            self._log.setLevel(logging.WARNING)
        try:
            server = Server(lib, host, port, password)
            server.cmd_setvol(None, volume)
            server.run()
        except NoGstreamerError:
            global_log.error(u'Gstreamer Python bindings not found.')
            # Bug fix: the two adjacent string literals previously joined
            # without a space ('..."python-gi"or similar...').
            global_log.error(u'Install "gstreamer1.0" and "python-gi" '
                             u'or similar package to use BPD.')

    def commands(self):
        cmd = beets.ui.Subcommand(
            'bpd', help=u'run an MPD-compatible music player server'
        )
        cmd.parser.add_option(
            '-d', '--debug', action='store_true',
            help=u'dump all MPD traffic to stdout'
        )

        def func(lib, opts, args):
            # Positional args may override the configured host and port.
            host = self.config['host'].as_str()
            host = args.pop(0) if args else host
            port = args.pop(0) if args else self.config['port'].get(int)
            if args:
                raise beets.ui.UserError(u'too many arguments')
            password = self.config['password'].as_str()
            volume = self.config['volume'].get(int)
            debug = opts.debug or False
            self.start_bpd(lib, host, int(port), password, volume, debug)

        cmd.func = func
        return [cmd]
|
jcoady9/beets
|
beetsplug/bpd/__init__.py
|
Python
|
mit
| 40,508
|
import sys
def solve():
moveList = sys.stdin.read().strip()
moves = {}
houseGrid = []
houseGrid.append([1])
moves['^'] = [-1, 0]
moves['v'] = [1, 0]
moves['<'] = [0, -1]
moves['>'] = [0, 1]
santa = {}
santa['x'] = 0
santa['y'] = 0
robo = {}
robo['x'] = 0
robo['y'] = 0
santaList = [(0,0)]
isSanta = True
for move in moveList:
currentMove = moves[move]
if(isSanta):
isSanta = False
santa['y'] = santa['y'] + currentMove[0]
santa['x'] = santa['x'] + currentMove[1]
currentPosition = (santa['y'], santa['x'])
if not currentPosition in santaList:
santaList.append(currentPosition)
else:
isSanta = True
robo['y'] = robo['y'] + currentMove[0]
robo['x'] = robo['x'] + currentMove[1]
currentPosition = (robo['y'], robo['x'])
if not currentPosition in santaList:
santaList.append(currentPosition)
print(str(len(santaList)))
if __name__ == "__main__":
solve()
|
Jaemu/advent-of-code
|
python/day-3.py
|
Python
|
mit
| 918
|
#!/usr/bin/env python3
from testUtils import Utils
from Cluster import Cluster
from WalletMgr import WalletMgr
from TestHelper import TestHelper
import random
###############################################################
# restart-scenarios-test
#
# Tests restart scenarios for nodeos. Uses "-c" flag to indicate "replay" (--replay-blockchain), "resync"
# (--delete-all-blocks), "hardReplay"(--hard-replay-blockchain), and "none" to indicate what kind of restart flag should
# be used. This is one of the only test that actually verify that nodeos terminates with a good exit status.
#
###############################################################

Print=Utils.Print
errorExit=Utils.errorExit

args=TestHelper.parse_args({"-p","-d","-s","-c","--kill-sig","--kill-count","--keep-logs"
                            ,"--dump-error-details","-v","--leave-running","--clean-run"})
pnodes=args.p
topo=args.s
delay=args.d
chainSyncStrategyStr=args.c
debug=args.v
total_nodes = pnodes
killCount=args.kill_count if args.kill_count > 0 else 1
killSignal=args.kill_sig
killEosInstances= not args.leave_running
dumpErrorDetails=args.dump_error_details
keepLogs=args.keep_logs
killAll=args.clean_run

seed=1
Utils.Debug=debug
testSuccessful=False

random.seed(seed) # Use a fixed seed for repeatability.
cluster=Cluster(walletd=True)
walletMgr=WalletMgr(True)

try:
    TestHelper.printSystemInfo("BEGIN")

    # Wire the wallet manager into the cluster and set the restart
    # strategy under test. (A redundant duplicate setWalletMgr call was
    # removed here.)
    cluster.setWalletMgr(walletMgr)
    cluster.setChainStrategy(chainSyncStrategyStr)

    # Start from a clean slate.
    cluster.killall(allInstances=killAll)
    cluster.cleanup()
    walletMgr.killall(allInstances=killAll)
    walletMgr.cleanup()

    Print ("producing nodes: %d, topology: %s, delay between nodes launch(seconds): %d, chain sync strategy: %s" % (
        pnodes, topo, delay, chainSyncStrategyStr))

    Print("Stand up cluster")
    if cluster.launch(pnodes=pnodes, totalNodes=total_nodes, topo=topo, delay=delay) is False:
        errorExit("Failed to stand up eos cluster.")

    Print ("Wait for Cluster stabilization")
    # wait for cluster to start producing blocks
    if not cluster.waitOnClusterBlockNumSync(3):
        errorExit("Cluster never stabilized")

    Print("Stand up EOS wallet keosd")
    accountsCount=total_nodes
    walletName="MyWallet"
    Print("Creating wallet %s if one doesn't already exist." % walletName)
    wallet=walletMgr.create(walletName, [cluster.eosioAccount,cluster.defproduceraAccount,cluster.defproducerbAccount])

    Print ("Populate wallet with %d accounts." % (accountsCount))
    if not cluster.populateWallet(accountsCount, wallet):
        errorExit("Wallet initialization failed.")

    defproduceraAccount=cluster.defproduceraAccount
    eosioAccount=cluster.eosioAccount

    Print("Importing keys for account %s into wallet %s." % (defproduceraAccount.name, wallet.name))
    if not walletMgr.importKey(defproduceraAccount, wallet):
        errorExit("Failed to import key for account %s" % (defproduceraAccount.name))

    Print("Create accounts.")
    if not cluster.createAccounts(eosioAccount):
        errorExit("Accounts creation failed.")

    Print("Wait on cluster sync.")
    if not cluster.waitOnClusterSync():
        errorExit("Cluster sync wait failed.")

    # Kill some nodes, make sure the survivors keep working, then
    # relaunch the dead ones with the configured restart strategy.
    Print("Kill %d cluster node instances." % (killCount))
    if cluster.killSomeEosInstances(killCount, killSignal) is False:
        errorExit("Failed to kill Eos instances")
    Print("nodeos instances killed.")

    Print("Spread funds and validate")
    if not cluster.spreadFundsAndValidate(10):
        errorExit("Failed to spread and validate funds.")

    Print("Wait on cluster sync.")
    if not cluster.waitOnClusterSync():
        errorExit("Cluster sync wait failed.")

    Print ("Relaunch dead cluster nodes instances.")
    if cluster.relaunchEosInstances(cachePopen=True) is False:
        errorExit("Failed to relaunch Eos instances")
    Print("nodeos instances relaunched.")

    Print ("Resyncing cluster nodes.")
    if not cluster.waitOnClusterSync():
        errorExit("Cluster never synchronized")
    Print ("Cluster synched")

    Print("Spread funds and validate")
    if not cluster.spreadFundsAndValidate(10):
        errorExit("Failed to spread and validate funds.")

    Print("Wait on cluster sync.")
    if not cluster.waitOnClusterSync():
        errorExit("Cluster sync wait failed.")

    if killEosInstances:
        # Verify that cleanly-interrupted nodeos processes exit with
        # status 0 (one of the few tests that checks exit codes).
        atLeastOne=False
        for node in cluster.getNodes():
            if node.popenProc is not None:
                atLeastOne=True
                node.interruptAndVerifyExitStatus()
        assert atLeastOne, "Test is setup to verify that a cleanly interrupted nodeos exits with an exit status of 0, but this test may no longer be setup to do that"
    testSuccessful=True
finally:
    TestHelper.shutdown(cluster, walletMgr, testSuccessful=testSuccessful, killEosInstances=killEosInstances, killWallet=killEosInstances, keepLogs=keepLogs, cleanRun=killAll, dumpErrorDetails=dumpErrorDetails)

exit(0)
|
EOSIO/eos
|
tests/restart-scenarios-test.py
|
Python
|
mit
| 5,013
|
# Use holoviews to plot the particle distribution at given time

from pathlib import Path

import numpy as np
import xarray as xr
import holoviews as hv

from postladim import ParticleFile

hv.extension("bokeh")

# --- Settings ---

tstep = 40  # Time step to show

# Output file (and type)
output_file = "line_hv.png"
#output_file = "line_hv.html"

scale = 5  # Figure size factor

# --- Data files ---

ladim_dir = Path("../../")
grid_file = ladim_dir / "examples/data/ocean_avg_0014.nc"
particle_file = ladim_dir / "examples/line/line.nc"

# --- Read particle data ---

# Particle x/y positions at the chosen time step.
pf = ParticleFile(particle_file)
X, Y = pf.position(tstep)

# --- Background bathymetry data ---

# Read bathymetry and land mask
with xr.open_dataset(grid_file) as A:
    H = A.h
    M = A.mask_rho
jmax, imax = M.shape
H = H.where(M > 0)  # Mask out land
M = M.where(M < 1)  # Mask out sea

# --- Holoviews elements ---

# Land image
land = hv.Image(data=M, kdims=["xi_rho", "eta_rho"], group="Land")
# Bathymetry image (negated log depth so deeper water maps darker)
topo = hv.Image(data=-np.log10(H), kdims=["xi_rho", "eta_rho"], group="Topo")
# Particle distribution
spread = hv.Scatter(data=(X, Y))
# Overlay
h = topo * land * spread

# --- Plot options ---

# Frame size proportional to the grid dimensions.
h.opts(frame_width=scale * imax, frame_height=scale * jmax)
h.opts("Scatter", color="red")
h.opts("Image.Topo", cmap="blues_r", alpha=0.7)
h.opts("Image.Land", cmap=["#80B040"])

# --- Save output ---

if output_file.endswith("png"):
    # Static image: the interactive bokeh toolbar is meaningless.
    h.opts(toolbar=None)
hv.save(h, filename=output_file)
|
bjornaa/ladim
|
examples/line/plot_holoviews.py
|
Python
|
mit
| 1,481
|
import os
import random
import time
from flask import Flask, request, render_template, session, flash, redirect, \
url_for, jsonify
from flask.ext.mail import Mail, Message
from flask.ext.sqlalchemy import SQLAlchemy
from celery import Celery
app = Flask(__name__)
app.config['SECRET_KEY'] = 'top-secret!'

# Flask-Mail configuration
app.config['MAIL_SERVER'] = 'smtp.googlemail.com'
app.config['MAIL_PORT'] = 587
app.config['MAIL_USE_TLS'] = True
# NOTE(review): real credentials are hard-coded and committed to source
# control here -- they should be loaded from the environment instead.
app.config['MAIL_USERNAME'] = "bdh931101@gmail.com"
app.config['MAIL_PASSWORD'] = "1Alzkdpf*^^*go"
app.config['MAIL_DEFAULT_SENDER'] = 'bdh931101@gmail.com'

# Celery configuration (Redis as both broker and result backend)
app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/0'
app.config['CELERY_RESULT_BACKEND'] = 'redis://localhost:6379/0'
app.config.from_object(os.environ['APP_SETTINGS'])
db = SQLAlchemy(app)

from .models import MapInfo

# Initialize extensions
mail = Mail(app)

# Initialize Celery
celery = Celery(app.name, broker=app.config['CELERY_BROKER_URL'])
celery.conf.update(app.config)
@celery.task
def send_async_email(msg):
    """Background task to send an email with Flask-Mail."""
    # Flask-Mail needs an application context to read the mail config.
    with app.app_context():
        mail.send(msg)
@celery.task(bind=True)
def long_task(self):
    """Background task that runs a long function with progress reports."""
    verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking']
    adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast']
    noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit']
    message = ''
    total = random.randint(10, 50)
    for i in range(total):
        # Occasionally switch to a new random status phrase.
        if not message or random.random() < 0.25:
            message = '{0} {1} {2}...'.format(random.choice(verb),
                                              random.choice(adjective),
                                              random.choice(noun))
        # Publish progress metadata so /status/<task_id> can report it.
        self.update_state(state='PROGRESS',
                          meta={'current': i, 'total': total,
                                'status': message})
        time.sleep(1)
    return {'current': 100, 'total': 100, 'status': 'Task completed!',
            'result': 42}
@app.route('/', methods=['GET', 'POST'])
def index():
    """Landing page: on POST, send (or schedule) a test email."""
    if request.method == 'GET':
        return render_template('index.html', email=session.get('email', ''))
    email = request.form['email']
    # Remember the address so the form is pre-filled on the next visit.
    session['email'] = email

    # send the email
    msg = Message('Hello from Flask',
                  recipients=[request.form['email']])
    msg.body = 'This is a test email sent from a background Celery task.'
    if request.form['submit'] == 'Send':
        # send right away
        send_async_email.delay(msg)
        flash('Sending email to {0}'.format(email))
    else:
        # send in one minute
        send_async_email.apply_async(args=[msg], countdown=60)
        flash('An email will be sent to {0} in one minute'.format(email))

    return redirect(url_for('index'))
@app.route('/longtask', methods=['POST'])
def longtask():
    """Kick off a background `long_task` and return 202 Accepted with a
    Location header pointing at the polling endpoint."""
    task = long_task.apply_async()
    return jsonify({}), 202, {'Location': url_for('taskstatus',
                                                  task_id=task.id)}
@app.route('/status/<task_id>')
def taskstatus(task_id):
    """Report the state of a background `long_task` as JSON.

    The response carries `state`, `current`/`total` progress counters
    and a human-readable `status`; a `result` key is added once the
    task has finished.
    """
    task = long_task.AsyncResult(task_id)
    if task.state == 'PENDING':
        # Unknown or still-queued task: report zero progress.
        response = {
            'state': task.state,
            'current': 0,
            'total': 1,
            'status': 'Pending...'
        }
    elif task.state != 'FAILURE':
        # Robustness fix: `task.info` holds the meta dict set by
        # update_state(), but it can be None (or a non-dict) for states
        # without metadata -- guard before calling .get() on it.
        info = task.info if isinstance(task.info, dict) else {}
        response = {
            'state': task.state,
            'current': info.get('current', 0),
            'total': info.get('total', 1),
            'status': info.get('status', '')
        }
        if 'result' in info:
            response['result'] = info['result']
    else:
        # something went wrong in the background job
        response = {
            'state': task.state,
            'current': 1,
            'total': 1,
            'status': str(task.info),  # this is the exception raised
        }
    return jsonify(response)
if __name__ == '__main__':
    # Run the Flask development server (debug mode; not for production).
    app.run(debug=True)
|
bdh1011/wau
|
app.py
|
Python
|
mit
| 4,092
|
import re
from jinja2 import Markup, escape, evalcontextfilter
from app import app
# Two or more consecutive newlines (any platform flavor) mark a
# paragraph boundary.
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')


@app.template_filter('nl2br')
@evalcontextfilter
def nl2br(eval_ctx, value):
    """Jinja2 filter: convert newlines in `value` to HTML.

    Blank-line-separated chunks become <p> paragraphs and remaining
    single newlines become <br> tags. The input is escaped first; the
    result is wrapped in Markup (marked safe) only when autoescaping
    is active, so it is not double-escaped by the template engine.
    """
    result = u'\n\n'.join(u'<p>%s</p>' % p.replace('\n', '<br>\n')
                          for p in _paragraph_re.split(escape(value)))
    if eval_ctx.autoescape:
        result = Markup(result)
    return result
|
streety/Home
|
app/jinja_filters.py
|
Python
|
mit
| 429
|
import django_tables2 as tables
from django_tables2 import RequestConfig
from django_tables2.utils import A # alias for Accessor
from django.shortcuts import render
import inspect
class DtTemplate(tables.Table):
    """Base django-tables2 table template.

    The commented-out columns below are kept as examples of how
    concrete tables wire up model-backed columns.
    """
    #name_first = tables.Column(verbose_name="First Name")
    #name_last = tables.LinkColumn('track:runner',args=[A('id')])
    #gender = tables.Column(verbose_name="Gender")
    #result_count=tables.Column( accessor='result_count',orderable=False,verbose_name="Number of results")

    class Meta:
        #model = Runner
        # "paleblue" is the stock django-tables2 stylesheet class.
        attrs = {"class": "paleblue"}
|
ziposoft/godiva
|
src/zs/view_dt.py
|
Python
|
mit
| 582
|
#!/usr/bin/env python
""" MultiQC modules base class, contains helper functions """
from __future__ import print_function
from collections import OrderedDict
import io
import json
import mimetypes
import os
import random
import logging
from multiqc import config
logger = logging.getLogger(__name__)
letters = 'abcdefghijklmnopqrstuvwxyz'
class BaseMultiqcModule(object):
def __init__(self, name='base', anchor='base', target='',href='', info='', extra=''):
    # `name` is the module's display name; `anchor` the HTML anchor
    # used to link to its report section.
    self.name = name
    self.anchor = anchor
    if not target:
        # Default the link text to the module name.
        target = self.name
    # Intro HTML: a link to the tool's homepage followed by the blurb.
    self.intro = '<p><a href="{0}" target="_blank">{1}</a> {2}</p>{3}'.format(
        href, target, info, extra
    )
def find_log_files(self, fn_match=None, contents_match=None, filehandles=False):
"""
Search the analysis directory for log files of interest. Can take either a filename
suffix or a search string to return only log files that contain relevant info.
:param fn_match: Optional string or list of strings. Filename suffixes to search for.
:param contents_match: Optional string or list of strings to look for in file.
NB: Both searches return file if *any* of the supplied strings are matched.
:param filehandles: Set to true to return a file handle instead of slurped file contents
:return: Yields a set with two items - a sample name generated from the filename
and either the file contents or file handle for the current matched file.
As yield is used, the function can be iterated over without
"""
for root, dirnames, filenames in os.walk(config.analysis_dir, followlinks=True):
for fn in filenames:
# Make a sample name from the filename
s_name = self.clean_s_name(fn, root)
# Make search strings into lists if a string is given
if type(fn_match) is str:
fn_match = [fn_match]
if type(contents_match) is str:
contents_match = [contents_match]
# Search for file names ending in a certain string
readfile = False
if fn_match is not None:
for m in fn_match:
if m in fn:
readfile = True
break
else:
readfile = True
# Limit search to files under 1MB to avoid 30GB FastQ files etc.
try:
filesize = os.path.getsize(os.path.join(root,fn))
except (IOError, OSError, ValueError, UnicodeDecodeError):
log.debug("Couldn't read file when looking for output: {}".format(fn))
readfile = False
else:
if filesize > 1000000:
readfile = False
# Use mimetypes to exclude binary files where possible
(ftype, encoding) = mimetypes.guess_type(os.path.join(root, fn))
if encoding is not None:
readfile = False # eg. gzipped files
if ftype is not None and ftype.startswith('text') is False:
readfile = False # eg. images - 'image/jpeg'
if readfile:
try:
with io.open (os.path.join(root,fn), "r", encoding='utf-8') as f:
# Search this file for our string of interest
returnfile = False
if contents_match is not None:
for line in f:
for m in contents_match:
if m in line:
returnfile = True
break
f.seek(0)
else:
returnfile = True
# Give back what was asked for. Yield instead of return
# so that this function can be used as an interator
# without loading all files at once.
if returnfile:
if filehandles:
yield {'s_name': s_name, 'f': f, 'root': root, 'fn': fn}
else:
yield {'s_name': s_name, 'f': f.read(), 'root': root, 'fn': fn}
except (IOError, OSError, ValueError, UnicodeDecodeError):
logger.debug("Couldn't read file when looking for output: {}".format(fn))
def clean_s_name(self, s_name, root):
""" Helper function to take a long file name and strip it
back to a clean sample name. Somewhat arbitrary.
:param s_name: The sample name to clean
:param root: The directory path that this file is within
:param prepend_dirs: boolean, whether to prepend dir name to s_name
:param trimmed: boolean, remove common trimming suffixes from name?
:return: The cleaned sample name, ready to be used
"""
# Split then take first section to remove everything after these matches
for ext in config.fn_clean_exts:
s_name = s_name.split(ext ,1)[0]
if config.prepend_dirs:
s_name = "{} | {}".format(root.replace(os.sep, ' | '), s_name).lstrip('. | ')
return s_name
def plot_xy_data(self, data, config={}, original_plots=[]):
""" Plot a line graph with X,Y data. See CONTRIBUTING.md for
further instructions on use.
:param data: 2D dict, first keys as sample names, then x:y data pairs
:param original_plots: optional list of dicts with keys 's_name' and 'img_path'
:param config: optional dict with config key:value pairs. See CONTRIBUTING.md
:param original_plots: optional list specifying original plot images. Each dict
should have a key 's_name' and 'img_path'
:return: HTML and JS, ready to be inserted into the page
"""
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data]
# Generate the data dict structure expected by HighCharts series
plotdata = list()
for d in data:
thisplotdata = list()
for s in sorted(d.keys()):
pairs = list()
maxval = 0
for k in sorted(d[s].keys()):
pairs.append([k, d[s][k]])
maxval = max(maxval, d[s][k])
if maxval > 0 or config.get('hide_empty') is not True:
this_series = { 'name': s, 'data': pairs }
try:
this_series['color'] = config['colors'][s]
except: pass
thisplotdata.append(this_series)
plotdata.append(thisplotdata)
# Build the HTML for the page
if config.get('id') is None:
config['id'] = 'mqc_hcplot_'+''.join(random.sample(letters, 10))
html = ''
# Buttons to cycle through different datasets
if len(plotdata) > 1:
html += '<div class="btn-group switch_group">\n'
for k, p in enumerate(plotdata):
active = 'active' if k == 0 else ''
try: name = config['data_labels'][k]['name']
except: name = k+1
try: ylab = 'data-ylab="{}"'.format(config['data_labels'][k]['ylab'])
except: ylab = 'data-ylab="{}"'.format(name) if name != k+1 else ''
html += '<button class="btn btn-default btn-sm {a}" data-action="set_data" {y} data-newdata="{id}_datasets[{k}]" data-target="#{id}">{n}</button>\n'.format(a=active, id=config['id'], n=name, y=ylab, k=k)
html += '</div>\n\n'
# Markup needed if we have the option of clicking through to original plot images
if len(original_plots) > 0:
config['tt_label'] = 'Click to show original plot.<br>{}'.format(config.get('tt_label', '{point.x}: {point.y:.2f}'))
if len(original_plots) > 1:
next_prev_buttons = '<div class="clearfix"><div class="btn-group btn-group-sm"> \n\
<a href="#{prev}" class="btn btn-default original_plot_prev_btn" data-target="#{id}">« Previous</a> \n\
<a href="#{next}" class="btn btn-default original_plot_nxt_btn" data-target="#{id}">Next »</a> \n\
</div></div>'.format(id=config['id'], prev=original_plots[-1]['s_name'], next=original_plots[1]['s_name'])
else:
next_prev_buttons = ''
html += '<p class="text-muted instr">Click to show original FastQC plot.</p>\n\
<div id="{id}_wrapper" class="hc-plot-wrapper"> \n\
<div class="showhide_orig" style="display:none;"> \n\
<h4><span class="s_name">{n}</span></h4> \n\
{b} <img data-toggle="tooltip" title="Click to return to overlay plot" class="original-plot" src="{f}"> \n\
</div>\n\
<div id="{id}" class="hc-plot"></div> \n\
</div>'.format(id=config['id'], b=next_prev_buttons, n=original_plots[0]['s_name'], f=original_plots[0]['img_path'])
orig_plots = 'var {id}_orig_plots = {d}; \n'.format(id=config['id'], d=json.dumps(original_plots))
config['orig_click_func'] = True # Javascript prints the click function
# Regular plots (no original images)
else:
html += '<div id="{id}" class="hc-plot hc-line-plot"></div> \n'.format(id=config['id'])
orig_plots = ''
# Javascript with data dump
html += '<script type="text/javascript"> \n\
var {id}_datasets = {d}; \n\
{o} \
$(function () {{ plot_xy_line_graph("#{id}", {id}_datasets[0], {c}); }}); \n\
</script>'.format(id=config['id'], d=json.dumps(plotdata), c=json.dumps(config), o=orig_plots);
return html
def plot_bargraph (self, data, cats=None, config={}):
""" Plot a horizontal bar graph. Expects a 2D dict of sample
data. Also can take info about categories. There are quite a
few variants of how to use this function, see CONTRIBUTING.md
for documentation and examples.
:param data: 2D dict, first keys as sample names, then x:y data pairs
Can supply a list of dicts and will have buttons to switch
:param cats: optional list, dict or OrderedDict with plot categories
:param config: optional dict with config key:value pairs
:return: HTML and JS, ready to be inserted into the page
"""
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data]
# Check we have a list of cats
if type(cats) is not list or type(cats[0]) is str:
cats = [cats]
# Check that we have cats at all - find them from the data
for idx, cat in enumerate(cats):
if cats[idx] is None:
cats[idx] = list(set(k for s in data[idx].keys() for k in data[idx][s].keys() ))
# Given a list of cats - turn it into a dict
for idx, cat in enumerate(cats):
if type(cat) is list:
newcats = OrderedDict()
for c in cat:
newcats[c] = {'name': c}
cats[idx] = newcats
# Parse the data into a HighCharts friendly format
plotsamples = list()
plotdata = list()
for idx, d in enumerate(data):
hc_samples = sorted(list(d.keys()))
hc_data = list()
for c in cats[idx].keys():
thisdata = list()
for s in hc_samples:
thisdata.append(d[s][c])
if max(thisdata) > 0:
thisdict = { 'name': cats[idx][c]['name'], 'data': thisdata }
if 'color' in cats[idx][c]:
thisdict['color'] = cats[idx][c]['color']
hc_data.append(thisdict)
plotsamples.append(hc_samples)
plotdata.append(hc_data)
# Build the HTML
if config.get('id') is None:
config['id'] = 'mqc_hcplot_'+''.join(random.sample(letters, 10))
html = ''
# Counts / Percentages Switch
if config.get('cpswitch') is not False:
if config.get('cpswitch_c_active', True) is True:
c_active = 'active'
p_active = ''
else:
c_active = ''
p_active = 'active'
config['stacking'] = 'percent'
c_label = config.get('cpswitch_counts_label', 'Counts')
p_label = config.get('cpswitch_percent_label', 'Percentages')
html += '<div class="btn-group switch_group"> \n\
<button class="btn btn-default btn-sm {c_a}" data-action="set_numbers" data-target="#{id}">{c_l}</button> \n\
<button class="btn btn-default btn-sm {p_a}" data-action="set_percent" data-target="#{id}">{p_l}</button> \n\
</div> '.format(id=config['id'], c_a=c_active, p_a=p_active, c_l=c_label, p_l=p_label)
if len(plotdata) > 1:
html += ' '
# Buttons to cycle through different datasets
if len(plotdata) > 1:
html += '<div class="btn-group switch_group">\n'
for k, p in enumerate(plotdata):
active = 'active' if k == 0 else ''
try: name = config['data_labels'][k]
except: name = k+1
try: ylab = 'data-ylab="{}"'.format(config['data_labels'][k]['ylab'])
except: ylab = 'data-ylab="{}"'.format(name) if name != k+1 else ''
html += '<button class="btn btn-default btn-sm {a}" data-action="set_data" {y} data-newdata="{id}_datasets[{k}]" data-target="#{id}">{n}</button>\n'.format(a=active, id=config['id'], n=name, y=ylab, k=k)
html += '</div>\n\n'
# Plot and javascript function
html += '<div id="{id}" class="hc-plot hc-bar-plot"></div> \n\
<script type="text/javascript"> \n\
var {id}_samples = {s}; \n\
var {id}_datasets = {d}; \n\
$(function () {{ plot_stacked_bar_graph("#{id}", {id}_samples[0], {id}_datasets[0], {c}); }}); \
</script>'.format(id=config['id'], s=json.dumps(plotsamples), d=json.dumps(plotdata), c=json.dumps(config));
return html
def write_csv_file(self, data, fn):
with io.open (os.path.join(config.output_dir, 'report_data', fn), "w", encoding='utf-8') as f:
print( self.dict_to_csv( data ), file=f)
def dict_to_csv (self, d, delim="\t"):
""" Converts a dict to a CSV string
:param d: 2D dictionary, first keys sample names and second key
column headers
:param delim: optional delimiter character. Default: \t
:return: Flattened string, suitable to write to a CSV file.
"""
h = None # We make a list of keys to ensure consistent order
l = list()
for sn in sorted(d.keys()):
if h is None:
h = list(d[sn].keys())
l.append(delim.join([''] + h))
thesefields = [sn] + [ str(d[sn].get(k, '')) for k in h ]
l.append( delim.join( thesefields ) )
return ('\n'.join(l)).encode('utf-8', 'ignore').decode('utf-8')
|
moonso/MultiQC
|
multiqc/base_module.py
|
Python
|
mit
| 16,214
|
#!/usr/bin/env python
#to create a file in codesnippets folder
import pyperclip
import os
import re
import subprocess
def get_extension(file_name):
    """Return the extension of *file_name* (without the dot).

    Falls back to 'txt' when the name has no dot, so extension-less
    snippets still get filed under a sensible folder.
    """
    if '.' in file_name:
        # BUG FIX: the original split('.')[1] returned the wrong part for
        # names with several dots (eg. 'archive.tar.gz' -> 'tar').
        # rsplit with maxsplit=1 always yields the final extension.
        return file_name.rsplit('.', 1)[1]
    return 'txt'
def cut(str, len1):
    """Drop the first ``len1 + 1`` characters of *str*.

    Used to strip the snippet header line (length ``len1``) together with
    the newline that follows it, leaving only the snippet body.
    """
    body_start = len1 + 1
    return str[body_start:]
#for displaying contents
def find(name, path):
    """Return the full path of the first file called *name* under *path*.

    Walks the tree top-down; returns None when no such file exists.
    """
    for dirpath, _dirnames, filenames in os.walk(path):
        if name in filenames:
            return os.path.join(dirpath, name)
    return None
#ubuntu notification (message sending)
def sendmessage(message):
    """Pop up an Ubuntu desktop notification via the notify-send utility."""
    # Fire-and-forget: we deliberately do not wait for the process.
    cmd = ['notify-send', message]
    subprocess.Popen(cmd)
    return
# Main clipboard-watching loop: poll the clipboard and, when a snippet is
# marked with an "add ... code ... snippets ... -safe" header line, file it
# into ~/code_snippets/<EXT>/<filename>.
while True:
    clip = pyperclip.paste()
    if clip == " ":
        # " " is what we write back after handling a snippet; ignore it.
        continue
    lower_lines = clip.lower().split("\n")
    # Only act when the trigger phrase is the FIRST line of the clipboard,
    # so the phrase appearing mid-text never saves unintended content.
    if lower_lines[0] == "stop -safe":
        sendmessage("Stopped the background process for code snippet management...byebye")
        # BUG FIX: os.exit() does not exist (AttributeError at runtime);
        # raise SystemExit to stop the watcher cleanly.
        raise SystemExit(0)
    if (lower_lines[0].find("add") != -1 and lower_lines[0].find("code") != -1 and
            lower_lines[0].find("snippets") != -1 and lower_lines[0].find("-safe") != -1):
        if re.search(r'\w+\.[a-z,A-Z]', lower_lines[0]) is None:
            sendmessage("SPECIFY FILEEXTENSION (default file type is txt)")
        raw_lines = clip.split('\n')
        header_words = raw_lines[0].split(' ')
        # The file name is the second-to-last word of the header line
        # (the last word is the "-safe" marker).
        file_name = header_words[-2]
        # Everything after the header line is the snippet body.
        new_str = cut(clip, len(raw_lines[0]))
        try:
            # code_snippets is the head folder; create it on first use.
            base_dir = '/home/nikhil/code_snippets'
            if not os.path.exists(base_dir):
                os.makedirs(base_dir)
            extension = get_extension(file_name)
            # One sub-folder per upper-cased extension name.
            ext_dir = os.path.join(base_dir, extension.upper())
            if not os.path.exists(ext_dir):
                os.makedirs(ext_dir)
            snippet_path = os.path.join(ext_dir, file_name)
            if not os.path.exists(snippet_path):
                # New snippet: write it out.
                out = open(snippet_path, 'w')
                out.write(new_str)
                out.close()
                sendmessage("successfully added to code snippets collection")
            else:
                # BUG FIX: the append-to-existing branch previously lived in
                # the `except` handler and was unreachable, because a
                # pre-existing file raises no exception. Append explicitly,
                # separated by the '@@' marker line.
                existing = open(snippet_path, 'a')
                existing.write('\n\n@@\n' + new_str)
                existing.close()
                sendmessage("successfully added to code snippets collection (code has been appended to already existing file with same name)")
            pyperclip.copy(" ")
        except Exception:
            print('oops some error in finding file to append content')
            sendmessage("ERROR OCCURED")
            pyperclip.copy(" ")
# NOTE(review): unreachable — the loop above only exits via SystemExit;
# presumably a leftover relaunch hook for the companion retrieval script.
os.system('python /home/nikhil/Desktop/haha.py')
|
nikhilponnuru/codeCrumbs
|
code/create_file.py
|
Python
|
mit
| 4,006
|