from __future__ import unicode_literals

from indico.core.db import db
from indico.util.string import format_repr, return_ascii


class MapArea(db.Model):
    __tablename__ = 'map_areas'
    __table_args__ = (db.Index(None, 'is_default', unique=True,
                               postgresql_where=db.text('is_default')),
                      {'schema': 'roombooking'})

    id = db.Column(
        db.Integer,
        primary_key=True
    )
    name = db.Column(
        db.String,
        nullable=False
    )
    is_default = db.Column(
        db.Boolean,
        nullable=False,
        default=False
    )
    top_left_latitude = db.Column(
        db.Float,
        nullable=False
    )
    top_left_longitude = db.Column(
        db.Float,
        nullable=False
    )
    bottom_right_latitude = db.Column(
        db.Float,
        nullable=False
    )
    bottom_right_longitude = db.Column(
        db.Float,
        nullable=False
    )

    @return_ascii
    def __repr__(self):
        return format_repr(self, 'id', 'name', is_default=False)
{ "content_hash": "73a04e42a3d18a539fc03c674e142db9", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 104, "avg_line_length": 23.386363636363637, "alnum_prop": 0.5685131195335277, "repo_name": "mic4ael/indico", "id": "f672300fdc7199479c4e8df40f6f7108a7516572", "size": "1243", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "indico/modules/rb/models/map_areas.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "553825" }, { "name": "HTML", "bytes": "1375160" }, { "name": "JavaScript", "bytes": "1852830" }, { "name": "Mako", "bytes": "1340" }, { "name": "Python", "bytes": "4612709" }, { "name": "Shell", "bytes": "2665" }, { "name": "TeX", "bytes": "23292" }, { "name": "XSLT", "bytes": "1504" } ], "symlink_target": "" }
class Student(object):
    __slots__ = ('name', 'age')


class GraduateStudent(Student):
    pass


s = Student()
s.name = 'Michael'
s.age = 15
try:
    s.score = 88
except AttributeError as e:
    print('AttributeError:', e)

g = GraduateStudent()
g.score = 99  # works: __slots__ does not apply to subclasses that lack their own
print(g.score)
{ "content_hash": "f849c39f9d7469a725f5b97362104468", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 31, "avg_line_length": 14.473684210526315, "alnum_prop": 0.64, "repo_name": "wuchengang/PythonLearing", "id": "ef36129ae7c296f393e6a999894bf058a94b8aeb", "size": "323", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "use_slots.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "32694" }, { "name": "Python", "bytes": "7070" } ], "symlink_target": "" }
"""Tests for the fake storage.""" from __future__ import unicode_literals import unittest from plaso.containers import event_sources from plaso.containers import reports from plaso.containers import sessions from plaso.containers import tasks from plaso.containers import warnings from plaso.lib import definitions from plaso.storage.fake import writer as fake_writer from tests.storage import test_lib from tests.containers import test_lib as containers_test_lib class FakeStorageWriterTest(test_lib.StorageTestCase): """Tests for the fake storage writer object.""" def testAddAnalysisReport(self): """Tests the AddAnalysisReport function.""" session = sessions.Session() analysis_report = reports.AnalysisReport( plugin_name='test', text='test report') storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() storage_writer.AddAnalysisReport(analysis_report) storage_writer.Close() with self.assertRaises(IOError): storage_writer.AddAnalysisReport(analysis_report) def testAddEvent(self): """Tests the AddEvent function.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() event = None for event, event_data, event_data_stream in ( containers_test_lib.CreateEventsFromValues(self._TEST_EVENTS)): storage_writer.AddEventDataStream(event_data_stream) event_data.SetEventDataStreamIdentifier(event_data_stream.GetIdentifier()) storage_writer.AddEventData(event_data) event.SetEventDataIdentifier(event_data.GetIdentifier()) storage_writer.AddEvent(event) storage_writer.Close() # Test writing an event twice. with self.assertRaises(IOError): storage_writer.AddEvent(event) def testAddEventSource(self): """Tests the AddEventSource function.""" session = sessions.Session() event_source = event_sources.EventSource() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() storage_writer.AddEventSource(event_source) storage_writer.Close() with self.assertRaises(IOError): storage_writer.AddEventSource(event_source) def testAddEventTag(self): """Tests the AddEventTag function.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() test_events = [] for event, event_data, event_data_stream in ( containers_test_lib.CreateEventsFromValues(self._TEST_EVENTS)): storage_writer.AddEventDataStream(event_data_stream) event_data.SetEventDataStreamIdentifier(event_data_stream.GetIdentifier()) storage_writer.AddEventData(event_data) event.SetEventDataIdentifier(event_data.GetIdentifier()) storage_writer.AddEvent(event) test_events.append(event) event_tag = None test_event_tags = self._CreateTestEventTags(test_events) for event_tag in test_event_tags: storage_writer.AddEventTag(event_tag) storage_writer.Close() # Test writing an event tag twice. 
with self.assertRaises(IOError): storage_writer.AddEventTag(event_tag) def testAddWarning(self): """Tests the AddWarning function.""" session = sessions.Session() warning = warnings.ExtractionWarning( message='Test extraction error') storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() storage_writer.AddWarning(warning) storage_writer.Close() with self.assertRaises(IOError): storage_writer.AddWarning(warning) def testOpenClose(self): """Tests the Open and Close functions.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() storage_writer.Close() storage_writer.Open() storage_writer.Close() storage_writer = fake_writer.FakeStorageWriter( session, storage_type=definitions.STORAGE_TYPE_TASK) storage_writer.Open() storage_writer.Close() storage_writer.Open() with self.assertRaises(IOError): storage_writer.Open() storage_writer.Close() with self.assertRaises(IOError): storage_writer.Close() def testGetEvents(self): """Tests the GetEvents function.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() for event, event_data, event_data_stream in ( containers_test_lib.CreateEventsFromValues(self._TEST_EVENTS)): storage_writer.AddEventDataStream(event_data_stream) event_data.SetEventDataStreamIdentifier(event_data_stream.GetIdentifier()) storage_writer.AddEventData(event_data) event.SetEventDataIdentifier(event_data.GetIdentifier()) storage_writer.AddEvent(event) test_events = list(storage_writer.GetEvents()) self.assertEqual(len(test_events), 4) storage_writer.Close() # TODO: add tests for GetEventSources. # TODO: add tests for GetEventTags. # TODO: add tests for GetFirstWrittenEventSource and # GetNextWrittenEventSource. def testGetSortedEvents(self): """Tests the GetSortedEvents function.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() for event, event_data, event_data_stream in ( containers_test_lib.CreateEventsFromValues(self._TEST_EVENTS)): storage_writer.AddEventDataStream(event_data_stream) event_data.SetEventDataStreamIdentifier(event_data_stream.GetIdentifier()) storage_writer.AddEventData(event_data) event.SetEventDataIdentifier(event_data.GetIdentifier()) storage_writer.AddEvent(event) test_events = list(storage_writer.GetSortedEvents()) self.assertEqual(len(test_events), 4) storage_writer.Close() # TODO: add test with time range. 
def testWriteSessionStartAndCompletion(self): """Tests the WriteSessionStart and WriteSessionCompletion functions.""" session = sessions.Session() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() storage_writer.WriteSessionStart() storage_writer.WriteSessionCompletion() storage_writer.Close() with self.assertRaises(IOError): storage_writer.WriteSessionStart() with self.assertRaises(IOError): storage_writer.WriteSessionCompletion() storage_writer = fake_writer.FakeStorageWriter( session, storage_type=definitions.STORAGE_TYPE_TASK) storage_writer.Open() with self.assertRaises(IOError): storage_writer.WriteSessionStart() with self.assertRaises(IOError): storage_writer.WriteSessionCompletion() storage_writer.Close() def testWriteTaskStartAndCompletion(self): """Tests the WriteTaskStart and WriteTaskCompletion functions.""" session = sessions.Session() task = tasks.Task(session_identifier=session.identifier) storage_writer = fake_writer.FakeStorageWriter( session, storage_type=definitions.STORAGE_TYPE_TASK, task=task) storage_writer.Open() storage_writer.WriteTaskStart() storage_writer.WriteTaskCompletion() storage_writer.Close() with self.assertRaises(IOError): storage_writer.WriteTaskStart() with self.assertRaises(IOError): storage_writer.WriteTaskCompletion() storage_writer = fake_writer.FakeStorageWriter(session) storage_writer.Open() with self.assertRaises(IOError): storage_writer.WriteTaskStart() with self.assertRaises(IOError): storage_writer.WriteTaskCompletion() storage_writer.Close() if __name__ == '__main__': unittest.main()
{ "content_hash": "009de7cf127a05391d3cd80dc3e5cac5", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 80, "avg_line_length": 29.24334600760456, "alnum_prop": 0.725003250552594, "repo_name": "rgayon/plaso", "id": "fa1f49d84a1992ff73bf2e920844944f125a07bd", "size": "7738", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/storage/fake/writer.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "415" }, { "name": "Dockerfile", "bytes": "1047" }, { "name": "Makefile", "bytes": "712" }, { "name": "PowerShell", "bytes": "17771" }, { "name": "Python", "bytes": "4803191" }, { "name": "Ruby", "bytes": "926" }, { "name": "Shell", "bytes": "46225" } ], "symlink_target": "" }
import canmatrix.formats
import sys
import optparse

# command line options...
usage = """
%prog [options] matrix

matrix can be any of *.dbc|*.dbf|*.kcd|*.arxml
"""

parser = optparse.OptionParser(usage=usage)
parser.add_option(
    "-f", "--frames",
    dest="frames",
    help="encode list of frames",
    default="*")

(cmdlineOptions, args) = parser.parse_args()
if len(args) < 1:
    parser.print_help()
    sys.exit(1)

# load matrix
db = canmatrix.formats.loadp_flat(args[0])

# get all frames which match the commandline
frames = db.glob_frames(cmdlineOptions.frames)


# helper to read physical value from user
def read_signal_value_from_user(signal):
    a = input("Enter Value for " + signal.name + " ")
    if signal.is_float:
        return float(a)
    else:
        return int(a)


# go through all frames
for frame in frames:
    print(frame.name)
    if frame.is_complex_multiplexed:
        # ignore complex multiplexed signals
        continue
    if frame.is_multiplexed:
        # if multiplexed frame search for multiplexer
        multiplexer_signal = frame.get_multiplexer
        # read multiplexer value
        a = input("Enter Value for Multiplexer " + multiplexer_signal.name + " ")
        signalDict = dict()
        signalDict[multiplexer_signal.name] = int(a)
        # read signals for the given multiplexer value
        for signal in frame.get_signals_for_multiplexer_value(int(a)):
            signalDict[signal.name] = read_signal_value_from_user(signal)
    else:
        # not multiplexed frame
        signalDict = dict()
        # go through all signals
        for signal in frame.signals:
            signalDict[signal.name] = read_signal_value_from_user(signal)

    frame_data = frame.encode(signalDict)
    if frame.arbitration_id.extended:
        print("{:05X}#".format(frame.arbitration_id.id) +
              "".join(["%02X" % i for i in frame_data]))
    else:
        print("{:03X}#".format(frame.arbitration_id.id) +
              "".join(["%02X" % i for i in frame_data]))
{ "content_hash": "8f369878cbe19d63fdeb6a7458fbf75b", "timestamp": "", "source": "github", "line_count": 71, "max_line_length": 100, "avg_line_length": 28.253521126760564, "alnum_prop": 0.6470588235294118, "repo_name": "altendky/canmatrix", "id": "1e36f64b8e530e1a406788709063cb4fadacf675", "size": "2030", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "examples/encodeFrame.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Batchfile", "bytes": "466" }, { "name": "Python", "bytes": "746092" }, { "name": "Shell", "bytes": "83" } ], "symlink_target": "" }
# parimad_katsed ("best attempts"): return a set of (name, best result) pairs.
def parimad_katsed(sonastik):
    tagastatav = set()
    for nimi in sonastik:
        tagastatav.add((nimi, max(sonastik[nimi])))
    return tagastatav


sisend = input("Enter file name: ")
tulemused = {}
f = open(sisend, encoding="utf-8")
while True:
    nimi = f.readline().strip()
    if nimi == "":
        break
    tulemused[nimi] = []
    for i in range(3):  # three attempts per competitor
        tulemused[nimi].append(float(f.readline().strip()))
f.close()

paaridehulk = parimad_katsed(tulemused)

# track the overall best result while printing each competitor's best
parimanimi = ""
parimtulemus = 0
for nimi, tulemus in paaridehulk:
    if tulemus > parimtulemus:
        parimtulemus = tulemus
        parimanimi = nimi
    print(nimi + ": " + str(tulemus))
print("The best was " + parimanimi)
{ "content_hash": "ca5357a42be6d17bce358623b7ce61c0", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 59, "avg_line_length": 23.21875, "alnum_prop": 0.6258411843876177, "repo_name": "macobo/python-grader", "id": "2703d2f488e46493f1cbf3b8891df5e8f3bff4c0", "size": "743", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tasks/MTAT.03.100/2013/Midterm_1_resit/KT2_J1_kettaheide_solution.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "224193" }, { "name": "Shell", "bytes": "998" } ], "symlink_target": "" }
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('EOSS', '0016_engineercontext_measurement_parameter'),
    ]

    operations = [
        migrations.AddField(
            model_name='eosscontext',
            name='cluster_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='cluster_name',
            field=models.TextField(default='daphne-cluster'),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_request_queue_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_request_queue_name',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_request_queue_url',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_response_queue_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_response_queue_name',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_response_queue_url',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_service_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_service_name',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_task_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_task_count',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='design_evaluator_task_name',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='eval_request_queue_url',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='eval_response_queue_url',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='genetic_algorithm_service_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='genetic_algorithm_service_name',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='genetic_algorithm_task_arn',
            field=models.TextField(null=True),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='genetic_algorithm_task_count',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='eosscontext',
            name='genetic_algorithm_task_name',
            field=models.TextField(null=True),
        ),
        migrations.CreateModel(
            name='EvaluatorInstance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('ping_request_queue_url', models.TextField(null=True)),
                ('ping_response_queue_url', models.TextField(null=True)),
                ('private_request_queue_url', models.TextField(null=True)),
                ('private_response_queue_url', models.TextField(null=True)),
                ('arn', models.TextField(null=True)),
                ('eosscontext', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='EOSS.eosscontext')),
            ],
        ),
    ]
{ "content_hash": "c9ed334106e03b96902fee4f7759826d", "timestamp": "", "source": "github", "line_count": 124, "max_line_length": 119, "avg_line_length": 35.74193548387097, "alnum_prop": 0.5577617328519856, "repo_name": "seakers/daphne_brain", "id": "c617b797c51dbb175f62b7572b00114855b8bf12", "size": "4481", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "EOSS/migrations/0017_eosscontext_cluster_arn_eosscontext_cluster_name_and_more.py", "mode": "33188", "license": "mit", "language": [ { "name": "PLpgSQL", "bytes": "1683352" }, { "name": "Python", "bytes": "1557398" }, { "name": "Shell", "bytes": "4153" } ], "symlink_target": "" }
from __future__ import unicode_literals

import re

__all__ = ('JsMinifier', 'minify')

PLACEHOLDER = '~{{[({0})]}}~'

# '//' line comments run to the end of the line (pass currently disabled below).
COMMENTS_RE = re.compile(r'//.*$', re.M)
EMPTY_LINES_RE = re.compile(r'\n{2,}')
LEADING_SPACES_RE = re.compile(r'^ +', re.M)
MULTI_LINE_COMMENTS_RE = re.compile(r'/\*.*?\*/', re.DOTALL)
NEWLINES_RE = re.compile(r'(\r\n|\r)')
PLACEHOLDERS_RE = re.compile(r'\~\{\[\((\d+)\)\]\}\~')
TRAILING_SPACES_RE = re.compile(r' +$', re.M)
WHITESPACES_RE = re.compile(r'[ \t]+')


class JsMinifier(object):
    """
    Removes all extra whitespaces, comments and other unneeded characters.

    """

    def minify(self, code):
        self.placeholders = []
        return self._minify(code)

    def _minify(self, code):
        code = self.process_newlines(code)
        # Comment removal stays disabled: the naive patterns would also
        # strip '//' sequences inside string literals (e.g. in URLs).
        #code = self.process_comments(code)
        code = self.process_whitespaces(code)
        code = self.process_leading_spaces(code)
        code = self.process_trailing_spaces(code)
        code = self.process_empty_lines(code)
        #code = self.fill_placeholders(code)
        return code

    def fill_placeholders(self, code):
        return PLACEHOLDERS_RE.sub(
            self.placeholders_replacement, code)

    def placeholders_replacement(self, matchobj):
        try:
            return self.placeholders[int(matchobj.group(1))]
        except IndexError:
            return ''

    def process_comments(self, code):
        code = COMMENTS_RE.sub('', code)
        code = MULTI_LINE_COMMENTS_RE.sub('', code)
        return code

    def process_empty_lines(self, code):
        return EMPTY_LINES_RE.sub(r'\n', code)

    def process_leading_spaces(self, code):
        return LEADING_SPACES_RE.sub('', code)

    def process_newlines(self, code):
        return NEWLINES_RE.sub(r'\n', code)

    def process_trailing_spaces(self, code):
        return TRAILING_SPACES_RE.sub('', code)

    def process_whitespaces(self, code):
        return WHITESPACES_RE.sub(' ', code)

    def reserve(self, code):
        self.placeholders.append(code)
        return PLACEHOLDER.format(len(self.placeholders) - 1)


def minify(code):
    minifier = JsMinifier()
    return minifier.minify(code)
{ "content_hash": "7fef7bec664394afa1bb0575504b4585", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 74, "avg_line_length": 28.493506493506494, "alnum_prop": 0.6107566089334548, "repo_name": "samuelmaudo/yepes", "id": "18fbae2d7050c2adaeb1bdbd180bbebcd3439079", "size": "2218", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "yepes/utils/minifier/backends/js.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C++", "bytes": "1485" }, { "name": "CSS", "bytes": "2805" }, { "name": "HTML", "bytes": "18543" }, { "name": "JavaScript", "bytes": "56039" }, { "name": "Python", "bytes": "2415982" } ], "symlink_target": "" }
""" Utilities for authentication and authorization """ import binascii from base64 import b64decode # pylint: disable=E0611,W0403 from paste.httpheaders import AUTHORIZATION, WWW_AUTHENTICATE # pylint: enable=E0611,W0403 from pyramid.authorization import ACLAuthorizationPolicy from pyramid.httpexceptions import HTTPForbidden, HTTPUnauthorized from pyramid.security import Everyone, authenticated_userid # Copied from # http://docs.pylonsproject.org/projects/pyramid_cookbook/en/latest/auth/basic.html def get_basicauth_credentials(request): """ Get the user/password from HTTP basic auth """ authorization = AUTHORIZATION(request.environ) try: authmeth, auth = authorization.split(" ", 1) except ValueError: # not enough values to unpack return None if authmeth.lower() == "basic": try: auth = b64decode(auth.strip()).decode("utf8") except (TypeError, binascii.Error): # can't decode return None try: login, password = auth.split(":", 1) except ValueError: # not enough values to unpack return None return {"login": login, "password": password} return None class BasicAuthenticationPolicy(object): """A :app:`Pyramid` :term:`authentication policy` which obtains data from basic authentication headers. Constructor Arguments ``check`` A callback passed the credentials and the request, expected to return None if the userid doesn't exist or a sequence of group identifiers (possibly empty) if the user does exist. Required. """ def authenticated_userid(self, request): """ Verify login and return the authed userid """ credentials = get_basicauth_credentials(request) if credentials is None: return None userid = credentials["login"] if request.access.verify_user(credentials["login"], credentials["password"]): return userid return None def unauthenticated_userid(self, request): """ Return userid without performing auth """ return request.userid def effective_principals(self, request): """ Get the authed groups for the active user """ if request.userid is None: return [Everyone] return request.access.user_principals(request.userid) def remember(self, request, principal, **kw): """ HTTP Headers to remember credentials """ return [] def forget(self, request): """ HTTP headers to forget credentials """ return [] class SessionAuthPolicy(object): """ Simple auth policy using beaker sessions """ def authenticated_userid(self, request): """Return the authenticated userid or ``None`` if no authenticated userid can be found. This method of the policy should ensure that a record exists in whatever persistent store is used related to the user (the user should not have been deleted); if a record associated with the current id does not exist in a persistent store, it should return ``None``.""" return request.session.get("user", None) def unauthenticated_userid(self, request): """Return the *unauthenticated* userid. 
This method performs the same duty as ``authenticated_userid`` but is permitted to return the userid based only on data present in the request; it needn't (and shouldn't) check any persistent store to ensure that the user record related to the request userid exists.""" return request.userid def effective_principals(self, request): """Return a sequence representing the effective principals including the userid and any groups belonged to by the current user, including 'system' groups such as ``pyramid.security.Everyone`` and ``pyramid.security.Authenticated``.""" if request.userid is None: return [Everyone] return request.access.user_principals(request.userid) def remember(self, request, principal, **_): """ This implementation is slightly different than expected. The application should call remember(userid) rather than remember(principal) """ request.session["user"] = principal return [] def forget(self, request): """Return a set of headers suitable for 'forgetting' the current user on subsequent requests.""" request.session.delete() return [] def _is_logged_in(request): """ Check if there is a logged-in user in the session """ return request.userid is not None def _request_login(request): """ Return a 401 to force pip to upload its HTTP basic auth credentials """ response = HTTPUnauthorized() realm = WWW_AUTHENTICATE.tuples('Basic realm="%s"' % request.registry.realm) response.headers.update(realm) return response def _forbid(request): """ Return a 403 if user is logged in, otherwise return a 401. This is required to force pip to upload its HTTP basic auth credentials """ if request.is_logged_in: return HTTPForbidden() else: return _request_login(request) def includeme(config): """ Configure the app """ config.set_authorization_policy(ACLAuthorizationPolicy()) config.set_authentication_policy(config.registry.authentication_policy) config.add_authentication_policy(SessionAuthPolicy()) config.add_authentication_policy(BasicAuthenticationPolicy()) config.add_request_method(authenticated_userid, name="userid", reify=True) config.add_request_method(_forbid, name="forbid") config.add_request_method(_request_login, name="request_login") config.add_request_method(_is_logged_in, name="is_logged_in", reify=True) settings = config.get_settings() realm = settings.get("pypi.realm", "pypi") config.registry.realm = realm
{ "content_hash": "5b79390be054fc557b55b8a12b951111", "timestamp": "", "source": "github", "line_count": 169, "max_line_length": 85, "avg_line_length": 35.33136094674556, "alnum_prop": 0.6749288226427734, "repo_name": "mathcamp/pypicloud", "id": "0142f29091732a34b56db38ad9da08a25d4c10ca", "size": "5971", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pypicloud/auth.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "397" }, { "name": "HTML", "bytes": "24681" }, { "name": "JavaScript", "bytes": "25769" }, { "name": "Python", "bytes": "274128" }, { "name": "Shell", "bytes": "1609" } ], "symlink_target": "" }
import os
import unittest

from rspub.pluggable.gate import ResourceGateBuilder


class TestResourceGateBuilder(unittest.TestCase):

    def test_path_predicates(self):
        resource_dir = os.path.expanduser("~")
        builder = ResourceGateBuilder(resource_dir=resource_dir)
        includes = builder.build_includes([])
        pred = includes[0]
        # print(resource_dir)
        a_resource = os.path.join(resource_dir, "file.txt")
        self.assertTrue(pred(a_resource))

        another_resource = os.path.join(os.path.dirname(resource_dir), "file.txt")
        self.assertFalse(pred(another_resource))
{ "content_hash": "6d58c66fc34a9b5755f427ba499ade89", "timestamp": "", "source": "github", "line_count": 21, "max_line_length": 82, "avg_line_length": 29.571428571428573, "alnum_prop": 0.6827697262479872, "repo_name": "cegesoma/rspub-core", "id": "f5ee9ad47f9cad4d4c5d72058e6571f214f35c9f", "size": "669", "binary": false, "copies": "2", "ref": "refs/heads/cegesoma", "path": "rspub/pluggable/test/test_gate.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "229671" } ], "symlink_target": "" }
from __future__ import annotations

import os
from datetime import datetime
from pathlib import Path

from airflow import models
from airflow.models.baseoperator import chain
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.cloud.operators.life_sciences import LifeSciencesRunPipelineOperator
from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
from airflow.utils.trigger_rule import TriggerRule

ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")

DAG_ID = "example_life_sciences"

BUCKET_NAME = f"bucket_{DAG_ID}-{ENV_ID}"

FILE_NAME = "file"
LOCATION = "us-central1"

CURRENT_FOLDER = Path(__file__).parent
FILE_LOCAL_PATH = str(Path(CURRENT_FOLDER) / "resources" / FILE_NAME)

# [START howto_configure_simple_action_pipeline]
SIMPLE_ACTION_PIPELINE = {
    "pipeline": {
        "actions": [
            {"imageUri": "bash", "commands": ["-c", "echo Hello, world"]},
        ],
        "resources": {
            "regions": [f"{LOCATION}"],
            "virtualMachine": {
                "machineType": "n1-standard-1",
            },
        },
    },
}
# [END howto_configure_simple_action_pipeline]

# [START howto_configure_multiple_action_pipeline]
MULTI_ACTION_PIPELINE = {
    "pipeline": {
        "actions": [
            {
                "imageUri": "google/cloud-sdk",
                "commands": ["gsutil", "cp", f"gs://{BUCKET_NAME}/{FILE_NAME}", "/tmp"],
            },
            {"imageUri": "bash", "commands": ["-c", "echo Hello, world"]},
            {
                "imageUri": "google/cloud-sdk",
                "commands": [
                    "gsutil",
                    "cp",
                    f"gs://{BUCKET_NAME}/{FILE_NAME}",
                    f"gs://{BUCKET_NAME}/output.in",
                ],
            },
        ],
        "resources": {
            "regions": [f"{LOCATION}"],
            "virtualMachine": {
                "machineType": "n1-standard-1",
            },
        },
    }
}
# [END howto_configure_multiple_action_pipeline]

with models.DAG(
    DAG_ID,
    schedule="@once",
    start_date=datetime(2021, 1, 1),
    catchup=False,
    tags=["example"],
) as dag:
    create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)

    upload_file = LocalFilesystemToGCSOperator(
        task_id="upload_file",
        src=FILE_LOCAL_PATH,
        dst=FILE_NAME,
        bucket=BUCKET_NAME,
    )

    # [START howto_run_pipeline]
    simple_life_science_action_pipeline = LifeSciencesRunPipelineOperator(
        task_id="simple-action-pipeline",
        body=SIMPLE_ACTION_PIPELINE,
        project_id=PROJECT_ID,
        location=LOCATION,
    )
    # [END howto_run_pipeline]

    multiple_life_science_action_pipeline = LifeSciencesRunPipelineOperator(
        task_id="multi-action-pipeline", body=MULTI_ACTION_PIPELINE, project_id=PROJECT_ID, location=LOCATION
    )

    delete_bucket = GCSDeleteBucketOperator(
        task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
    )

    chain(
        # TEST SETUP
        create_bucket,
        upload_file,
        # TEST BODY
        simple_life_science_action_pipeline,
        multiple_life_science_action_pipeline,
        # TEST TEARDOWN
        delete_bucket,
    )

    from tests.system.utils.watcher import watcher

    # This test needs watcher in order to properly mark success/failure
    # when "tearDown" task with trigger rule is part of the DAG
    list(dag.tasks) >> watcher()

from tests.system.utils import get_test_run  # noqa: E402

# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
{ "content_hash": "8272551aeb3b8234332d2f66a7ba4e62", "timestamp": "", "source": "github", "line_count": 125, "max_line_length": 109, "avg_line_length": 30.696, "alnum_prop": 0.6142819911389106, "repo_name": "apache/airflow", "id": "73c4d701b7a143870486ae57068618e6cd9c3bd3", "size": "4624", "binary": false, "copies": "3", "ref": "refs/heads/main", "path": "tests/system/providers/google/cloud/life_sciences/example_life_sciences.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "25980" }, { "name": "Dockerfile", "bytes": "71458" }, { "name": "HCL", "bytes": "3786" }, { "name": "HTML", "bytes": "172957" }, { "name": "JavaScript", "bytes": "143915" }, { "name": "Jinja", "bytes": "38911" }, { "name": "Jupyter Notebook", "bytes": "5482" }, { "name": "Mako", "bytes": "1339" }, { "name": "Python", "bytes": "23697738" }, { "name": "R", "bytes": "313" }, { "name": "Shell", "bytes": "211306" }, { "name": "TypeScript", "bytes": "521019" } ], "symlink_target": "" }
import random

nums = []
for i in range(100):
    nums.append(int(random.random() * 10000))  # fill nums with 100 random ints from 0-9999

# selection sort: repeatedly move the smallest remaining value to the front
count = 0
while count < (len(nums) - 1):
    min_place = count
    for i in range(count, len(nums)):
        if nums[i] < nums[min_place]:
            min_place = i
    nums[count], nums[min_place] = nums[min_place], nums[count]
    count += 1

print(nums)  # print sorted list
{ "content_hash": "d8fde85845ebab3fea6ec6cc1c0804c0", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 102, "avg_line_length": 21.68421052631579, "alnum_prop": 0.6432038834951457, "repo_name": "dallaspythondojo/python", "id": "09d9c318ca543d40256519af0d891fbfe65a5aac", "size": "412", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Tan_ShinYi/Assignments/Python_Fundamentals/Selection_Sort.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "25381" }, { "name": "HTML", "bytes": "256675" }, { "name": "JavaScript", "bytes": "528" }, { "name": "Python", "bytes": "399336" } ], "symlink_target": "" }
import logging

from sqlalchemy.orm import lazyload

from decorator.injectdbsession import inject_db_session
from .entity import Album


@inject_db_session()
class AlbumRepo:
    logger = logging.getLogger(__name__)

    def get_albums_by_name(self, album_name):
        query = self._session.query(Album).options(
            lazyload("tracks.repositories")).filter(Album.name == album_name)
        return query.all()

    def save(self, album):
        try:
            self._session.add(album)
            self._session.flush()
            self._session.commit()
            return album
        except Exception:
            # roll back the failed transaction before re-raising
            self._session.rollback()
            raise

    def delete(self, album):
        try:
            self._session.delete(album)
            self._session.commit()
        except Exception:
            self._session.rollback()
            raise
{ "content_hash": "6621c4e6bb9a69d6bc52fe83c18045e6", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 77, "avg_line_length": 25.78787878787879, "alnum_prop": 0.5863689776733255, "repo_name": "rockers7414/xmusic-crawler", "id": "90372f8c44180f7c05f1c06a2e4a8062a8a008c7", "size": "851", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "daemon/database/albumrepo.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "60823" } ], "symlink_target": "" }
"""Notebook implementation of _Renderer and GUI.""" # Authors: Guillaume Favelier <guillaume.favelier@gmail.com> # # License: Simplified BSD from contextlib import contextmanager, nullcontext from distutils.version import LooseVersion import pyvista from IPython.display import display from ipywidgets import (Button, Dropdown, FloatSlider, FloatText, HBox, IntSlider, IntText, Text, VBox, IntProgress, Play, jsdlink) from ._abstract import (_AbstractDock, _AbstractToolBar, _AbstractMenuBar, _AbstractStatusBar, _AbstractLayout, _AbstractWidget, _AbstractWindow, _AbstractMplCanvas, _AbstractPlayback, _AbstractBrainMplCanvas, _AbstractMplInterface) from ._pyvista import _PyVistaRenderer, _close_all, _set_3d_view, _set_3d_title # noqa: F401,E501, analysis:ignore class _IpyLayout(_AbstractLayout): def _layout_initialize(self, max_width): self._layout_max_width = max_width def _layout_add_widget(self, layout, widget, stretch=0): widget.layout.margin = "2px 0px 2px 0px" if not isinstance(widget, Play): widget.layout.min_width = "0px" children = list(layout.children) children.append(widget) layout.children = tuple(children) # Fix columns if self._layout_max_width is not None and isinstance(widget, HBox): children = widget.children width = int(self._layout_max_width / len(children)) for child in children: child.layout.width = f"{width}px" class _IpyDock(_AbstractDock, _IpyLayout): def _dock_initialize(self, window=None): self._dock_width = 300 self._dock = self._dock_layout = VBox() self._dock.layout.width = f"{self._dock_width}px" self._layout_initialize(self._dock_width) def _dock_finalize(self): pass def _dock_show(self): self._dock_layout.layout.visibility = "visible" def _dock_hide(self): self._dock_layout.layout.visibility = "hidden" def _dock_add_stretch(self, layout): pass def _dock_add_layout(self, vertical=True): return VBox() if vertical else HBox() def _dock_add_label(self, value, align=False, layout=None): layout = self._dock_layout if layout is None else layout widget = Text(value=value, disabled=True) self._layout_add_widget(layout, widget) return _IpyWidget(widget) def _dock_add_button(self, name, callback, layout=None): widget = Button(description=name) widget.on_click(lambda x: callback()) self._layout_add_widget(layout, widget) return _IpyWidget(widget) def _dock_named_layout(self, name, layout, compact): layout = self._dock_layout if layout is None else layout if name is not None: hlayout = self._dock_add_layout(not compact) self._dock_add_label( value=name, align=not compact, layout=hlayout) self._layout_add_widget(layout, hlayout) layout = hlayout return layout def _dock_add_slider(self, name, value, rng, callback, compact=True, double=False, layout=None): layout = self._dock_named_layout(name, layout, compact) klass = FloatSlider if double else IntSlider widget = klass( value=value, min=rng[0], max=rng[1], readout=False, ) widget.observe(_generate_callback(callback), names='value') self._layout_add_widget(layout, widget) return _IpyWidget(widget) def _dock_add_spin_box(self, name, value, rng, callback, compact=True, double=True, layout=None): layout = self._dock_named_layout(name, layout, compact) klass = FloatText if double else IntText widget = klass( value=value, min=rng[0], max=rng[1], readout=False, ) widget.observe(_generate_callback(callback), names='value') self._layout_add_widget(layout, widget) return _IpyWidget(widget) def _dock_add_combo_box(self, name, value, rng, callback, compact=True, layout=None): layout = self._dock_named_layout(name, layout, compact) widget = 
Dropdown( value=value, options=rng, ) widget.observe(_generate_callback(callback), names='value') self._layout_add_widget(layout, widget) return _IpyWidget(widget) def _dock_add_group_box(self, name, layout=None): layout = self._dock_layout if layout is None else layout hlayout = VBox() self._layout_add_widget(layout, hlayout) return hlayout def _generate_callback(callback, to_float=False): def func(data): value = data["new"] if "new" in data else data["old"] callback(float(value) if to_float else value) return func class _IpyToolBar(_AbstractToolBar, _IpyLayout): def _tool_bar_load_icons(self): self.icons = dict() self.icons["help"] = "question" self.icons["play"] = None self.icons["pause"] = None self.icons["reset"] = "history" self.icons["scale"] = "magic" self.icons["clear"] = "trash" self.icons["movie"] = "video-camera" self.icons["restore"] = "replay" self.icons["screenshot"] = "camera" self.icons["visibility_on"] = "eye" self.icons["visibility_off"] = "eye" def _tool_bar_initialize(self, name="default", window=None): self.actions = dict() self._tool_bar = self._tool_bar_layout = HBox() self._layout_initialize(None) def _tool_bar_add_button(self, name, desc, func, icon_name=None, shortcut=None): icon_name = name if icon_name is None else icon_name icon = self.icons[icon_name] if icon is None: return widget = Button(tooltip=desc, icon=icon) widget.on_click(lambda x: func()) self._layout_add_widget(self._tool_bar_layout, widget) self.actions[name] = widget def _tool_bar_update_button_icon(self, name, icon_name): self.actions[name].icon = self.icons[icon_name] def _tool_bar_add_text(self, name, value, placeholder): widget = Text(value=value, placeholder=placeholder) self._layout_add_widget(self._tool_bar_layout, widget) self.actions[name] = widget def _tool_bar_add_spacer(self): pass def _tool_bar_add_file_button(self, name, desc, func, shortcut=None): def callback(): fname = self.actions[f"{name}_field"].value func(None if len(fname) == 0 else fname) self._tool_bar_add_text( name=f"{name}_field", value=None, placeholder="Type a file name", ) self._tool_bar_add_button( name=name, desc=desc, func=callback, ) def _tool_bar_add_play_button(self, name, desc, func, shortcut=None): widget = Play(interval=500) self._layout_add_widget(self._tool_bar_layout, widget) self.actions[name] = widget return _IpyWidget(widget) def _tool_bar_set_theme(self, theme): pass class _IpyMenuBar(_AbstractMenuBar): def _menu_initialize(self, window=None): pass def _menu_add_submenu(self, name, desc): pass def _menu_add_button(self, menu_name, name, desc, func): pass class _IpyStatusBar(_AbstractStatusBar, _IpyLayout): def _status_bar_initialize(self, window=None): self._status_bar = self._status_bar_layout = HBox() self._layout_initialize(None) def _status_bar_add_label(self, value, stretch=0): widget = Text(value=value, disabled=True) self._layout_add_widget(self._status_bar_layout, widget) return _IpyWidget(widget) def _status_bar_add_progress_bar(self, stretch=0): widget = IntProgress() self._layout_add_widget(self._status_bar_layout, widget) return _IpyWidget(widget) def _status_bar_update(self): pass class _IpyPlayback(_AbstractPlayback): def _playback_initialize(self, func, timeout, value, rng, time_widget, play_widget): play = play_widget._widget play.min = rng[0] play.max = rng[1] play.value = value slider = time_widget._widget jsdlink((play, 'value'), (slider, 'value')) jsdlink((slider, 'value'), (play, 'value')) class _IpyMplInterface(_AbstractMplInterface): def _mpl_initialize(self): from 
matplotlib.backends.backend_nbagg import (FigureCanvasNbAgg, FigureManager) self.canvas = FigureCanvasNbAgg(self.fig) self.manager = FigureManager(self.canvas, 0) class _IpyMplCanvas(_AbstractMplCanvas, _IpyMplInterface): def __init__(self, width, height, dpi): super().__init__(width, height, dpi) self._mpl_initialize() class _IpyBrainMplCanvas(_AbstractBrainMplCanvas, _IpyMplInterface): def __init__(self, brain, width, height, dpi): super().__init__(brain, width, height, dpi) self._mpl_initialize() self._connect() class _IpyWindow(_AbstractWindow): def _window_close_connect(self, func): pass def _window_get_dpi(self): return 96 def _window_get_size(self): return self.figure.plotter.window_size def _window_get_simple_canvas(self, width, height, dpi): return _IpyMplCanvas(width, height, dpi) def _window_get_mplcanvas(self, brain, interactor_fraction, show_traces, separate_canvas): w, h = self._window_get_mplcanvas_size(interactor_fraction) self._interactor_fraction = interactor_fraction self._show_traces = show_traces self._separate_canvas = separate_canvas self._mplcanvas = _IpyBrainMplCanvas( brain, w, h, self._window_get_dpi()) return self._mplcanvas def _window_adjust_mplcanvas_layout(self): pass def _window_get_cursor(self): pass def _window_set_cursor(self, cursor): pass def _window_new_cursor(self, name): pass @contextmanager def _window_ensure_minimum_sizes(self): yield def _window_set_theme(self, theme): pass class _IpyWidget(_AbstractWidget): def set_value(self, value): self._widget.value = value def get_value(self): return self._widget.value def set_range(self, rng): self._widget.min = rng[0] self._widget.max = rng[1] def show(self): self._widget.layout.visibility = "visible" def hide(self): self._widget.layout.visibility = "hidden" def update(self, repaint=True): pass class _Renderer(_PyVistaRenderer, _IpyDock, _IpyToolBar, _IpyMenuBar, _IpyStatusBar, _IpyWindow, _IpyPlayback): def __init__(self, *args, **kwargs): self._dock = None self._tool_bar = None self._status_bar = None kwargs["notebook"] = True super().__init__(*args, **kwargs) def _update(self): if self.figure.display is not None: self.figure.display.update_canvas() def _create_default_tool_bar(self): self._tool_bar_load_icons() self._tool_bar_initialize() self._tool_bar_add_file_button( name="screenshot", desc="Take a screenshot", func=self.screenshot, ) def show(self): # default tool bar if self._tool_bar is None: self._create_default_tool_bar() display(self._tool_bar) # viewer if LooseVersion(pyvista.__version__) < LooseVersion('0.30'): viewer = self.plotter.show( use_ipyvtk=True, return_viewer=True) else: # pyvista>=0.30.0 viewer = self.plotter.show( jupyter_backend="ipyvtklink", return_viewer=True) viewer.layout.width = None # unlock the fixed layout # main widget if self._dock is None: main_widget = viewer else: main_widget = HBox([self._dock, viewer]) display(main_widget) self.figure.display = viewer # status bar if self._status_bar is not None: display(self._status_bar) return self.scene() _testing_context = nullcontext
{ "content_hash": "3fa0553406660845b0057944b19bd413", "timestamp": "", "source": "github", "line_count": 381, "max_line_length": 115, "avg_line_length": 33.25459317585302, "alnum_prop": 0.5996053670086819, "repo_name": "rkmaddox/mne-python", "id": "5eb2ae10bdc21405bfe3504fadf58987ef6f153e", "size": "12670", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mne/viz/backends/_notebook.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "3114" }, { "name": "PowerShell", "bytes": "2988" }, { "name": "Python", "bytes": "4400215" }, { "name": "Shell", "bytes": "936" } ], "symlink_target": "" }
import base64
import os
import re
import time
import datetime
import hashlib
import logging
import string
import StringIO
import random
import math

from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.api import images
from google.appengine.ext import db
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template

from v2ex.babel import Avatar
from v2ex.babel import Member
from v2ex.babel import Counter
from v2ex.babel import Section
from v2ex.babel import Node
from v2ex.babel import Site
from v2ex.babel import Minisite
from v2ex.babel import Page

from v2ex.babel import SYSTEM_VERSION

from v2ex.babel.security import *
from v2ex.babel.ext.cookies import Cookies
from v2ex.babel.ua import *
from v2ex.babel.da import *
from v2ex.babel.l10n import *
from v2ex.babel.handlers import BaseHandler

template.register_template_library('v2ex.templatetags.filters')

import config


class BackstageHomeHandler(webapp.RequestHandler):
    def get(self):
        site = GetSite()
        browser = detect(self.request)
        member = CheckAuth(self)
        l10n = GetMessages(self, member, site)
        template_values = {}
        template_values['l10n'] = l10n
        template_values['site'] = site
        template_values['rnd'] = random.randrange(1, 100)
        template_values['system_version'] = SYSTEM_VERSION
        template_values['member'] = member
        template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8')
        member_total = memcache.get('member_total')
        if member_total is None:
            q3 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'member.total'")
            if (q3.count() > 0):
                member_total = q3[0].value
            else:
                member_total = 0
            memcache.set('member_total', member_total, 600)
        template_values['member_total'] = member_total
        topic_total = memcache.get('topic_total')
        if topic_total is None:
            q4 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'topic.total'")
            if (q4.count() > 0):
                topic_total = q4[0].value
            else:
                topic_total = 0
            memcache.set('topic_total', topic_total, 600)
        template_values['topic_total'] = topic_total
        reply_total = memcache.get('reply_total')
        if reply_total is None:
            q5 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'reply.total'")
            if (q5.count() > 0):
                reply_total = q5[0].value
            else:
                reply_total = 0
            memcache.set('reply_total', reply_total, 600)
        template_values['reply_total'] = reply_total
        if (member):
            if (member.level == 0):
                q = db.GqlQuery("SELECT * FROM Section ORDER BY nodes DESC")
                template_values['sections'] = q
                q2 = db.GqlQuery("SELECT * FROM Member ORDER BY created DESC LIMIT 5")
                template_values['latest_members'] = q2
                q3 = db.GqlQuery("SELECT * FROM Minisite ORDER BY created DESC")
                template_values['minisites'] = q3
                q4 = db.GqlQuery("SELECT * FROM Node ORDER BY last_modified DESC LIMIT 8")
                template_values['latest_nodes'] = q4
                if browser['ios']:
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_home.html')
                else:
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_home.html')
                output = template.render(path, template_values)
                self.response.out.write(output)
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstageNewMinisiteHandler(webapp.RequestHandler):
    def get(self):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['page_title'] = site.title + u' › Add New Site'
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_new_minisite.html')
                output = template.render(path, template_values)
                self.response.out.write(output)
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')

    def post(self):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['page_title'] = site.title + u' › Add New Site'
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                errors = 0
                # Verification: name
                minisite_name_error = 0
                minisite_name_error_messages = ['',
                                                u'Please enter a site name',
                                                u'Site name cannot exceed 32 characters',
                                                u'Site name can only contain a-Z, 0-9, - and _',
                                                u'Sorry, this site name already exists']
                minisite_name = self.request.get('name').strip().lower()
                if (len(minisite_name) == 0):
                    errors = errors + 1
                    minisite_name_error = 1
                else:
                    if (len(minisite_name) > 32):
                        errors = errors + 1
                        minisite_name_error = 2
                    else:
                        if (re.search('^[a-zA-Z0-9\-\_]+$', minisite_name)):
                            q = db.GqlQuery('SELECT __key__ FROM Minisite WHERE name = :1', minisite_name.lower())
                            if (q.count() > 0):
                                errors = errors + 1
                                minisite_name_error = 4
                        else:
                            errors = errors + 1
                            minisite_name_error = 3
                template_values['minisite_name'] = minisite_name
                template_values['minisite_name_error'] = minisite_name_error
                template_values['minisite_name_error_message'] = minisite_name_error_messages[minisite_name_error]
                # Verification: title
                minisite_title_error = 0
                minisite_title_error_messages = ['',
                                                 u'Please enter a site title',
                                                 u'Site title cannot exceed 32 characters']
                minisite_title = self.request.get('title').strip()
                if (len(minisite_title) == 0):
                    errors = errors + 1
                    minisite_title_error = 1
                else:
                    if (len(minisite_title) > 32):
                        errors = errors + 1
                        minisite_title_error = 2
                template_values['minisite_title'] = minisite_title
                template_values['minisite_title_error'] = minisite_title_error
                template_values['minisite_title_error_message'] = minisite_title_error_messages[minisite_title_error]
                # Verification: description
                minisite_description_error = 0
                minisite_description_error_messages = ['',
                                                       u'Please enter a site description',
                                                       u'Site description cannot exceed 2000 characters']
                minisite_description = self.request.get('description').strip()
                if (len(minisite_description) == 0):
                    errors = errors + 1
                    minisite_description_error = 1
                else:
                    if (len(minisite_description) > 2000):
                        errors = errors + 1
                        minisite_description_error = 2
                template_values['minisite_description'] = minisite_description
                template_values['minisite_description_error'] = minisite_description_error
                template_values['minisite_description_error_message'] = minisite_description_error_messages[minisite_description_error]
                template_values['errors'] = errors
                if (errors == 0):
                    minisite = Minisite()
                    q = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'minisite.max')
                    if (q.count() == 1):
                        counter = q[0]
                        counter.value = counter.value + 1
                    else:
                        counter = Counter()
                        counter.name = 'minisite.max'
                        counter.value = 1
                    minisite.num = counter.value
                    minisite.name = minisite_name
                    minisite.title = minisite_title
                    minisite.description = minisite_description
                    minisite.put()
                    counter.put()
                    self.redirect('/backstage')
                else:
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_new_minisite.html')
                    output = template.render(path, template_values)
                    self.response.out.write(output)
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstageMinisiteHandler(webapp.RequestHandler):
    def get(self, minisite_name):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['page_title'] = site.title + u' › Minisite'
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                minisite = GetKindByName('Minisite', minisite_name)
                if minisite is not False:
                    template_values['minisite'] = minisite
                    template_values['page_title'] = site.title + u' › ' + minisite.title
                    q = db.GqlQuery("SELECT * FROM Page WHERE minisite = :1 ORDER BY weight ASC", minisite)
                    template_values['pages'] = q
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_minisite.html')
                    output = template.render(path, template_values)
                    self.response.out.write(output)
                else:
                    self.redirect('/backstage')
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstageNewPageHandler(webapp.RequestHandler):
    def get(self, minisite_name):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                minisite = GetKindByName('Minisite', minisite_name)
                if minisite is not False:
                    template_values['minisite'] = minisite
                    template_values['page_title'] = site.title + u' › ' + minisite.title + u' › Add New Page'
                    template_values['page_content_type'] = 'text/html;charset=utf-8'
                    template_values['page_weight'] = 0
                    template_values['page_mode'] = 0
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_new_page.html')
                    output = template.render(path, template_values)
                    self.response.out.write(output)
                else:
                    self.redirect('/backstage')
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')

    def post(self, minisite_name):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                minisite = GetKindByName('Minisite', minisite_name)
                if minisite is False:
                    self.redirect('/backstage')
                else:
                    template_values['minisite'] = minisite
                    template_values['page_title'] = site.title + u' › ' + minisite.title + u' › Add New Page'
                    errors = 0
                    # Verification: name
                    page_name_error = 0
                    page_name_error_messages = ['',
                                                u'Please enter a page name',
                                                u'Page name cannot exceed 64 characters',
                                                u'Page name can only contain a-Z, 0-9, ., - and _',
                                                u'Sorry, this page name already exists']
                    page_name = self.request.get('name').strip().lower()
                    if (len(page_name) == 0):
                        errors = errors + 1
                        page_name_error = 1
                    else:
                        if (len(page_name) > 64):
                            errors = errors + 1
                            page_name_error = 2
                        else:
                            if (re.search('^[a-zA-Z0-9\-\_\.]+$', page_name)):
                                q = db.GqlQuery('SELECT * FROM Page WHERE name = :1', page_name.lower())
                                if (q.count() > 0):
                                    if q[0].minisite.name == minisite.name:
                                        errors = errors + 1
                                        page_name_error = 4
                            else:
                                errors = errors + 1
                                page_name_error = 3
                    template_values['page_name'] = page_name
                    template_values['page_name_error'] = page_name_error
                    template_values['page_name_error_message'] = page_name_error_messages[page_name_error]
                    # Verification: title
                    page_t_error = 0
                    page_t_error_messages = ['',
                                             u'Please enter a page title',
                                             u'Page title cannot exceed 100 characters']
                    page_t = self.request.get('t').strip()
                    if (len(page_t) == 0):
                        errors = errors + 1
                        page_t_error = 1
                    else:
                        if (len(page_t) > 100):
                            errors = errors + 1
                            page_t_error = 2
                    template_values['page_t'] = page_t
                    template_values['page_t_error'] = page_t_error
                    template_values['page_t_error_message'] = page_t_error_messages[page_t_error]
                    # Verification: content
                    page_content_error = 0
                    page_content_error_messages = ['',
                                                   u'Please enter page content',
                                                   u'Page content cannot exceed 200000 characters']
                    page_content = self.request.get('content').strip()
                    if (len(page_content) == 0):
                        errors = errors + 1
                        page_content_error = 1
                    else:
                        if (len(page_content) > 200000):
                            errors = errors + 1
                            page_content_error = 2
                    template_values['page_content'] = page_content
                    template_values['page_content_error'] = page_content_error
                    template_values['page_content_error_message'] = page_content_error_messages[page_content_error]
                    # Verification: mode
                    page_mode = self.request.get('mode').strip()
                    if page_mode == '1':
                        page_mode = 1
                    else:
                        page_mode = 0
                    # Verification: content_type
                    page_content_type = self.request.get('content_type').strip()
                    if (len(page_content_type) == 0):
                        page_content_type = 'text/html;charset=utf-8'
                    else:
                        if (len(page_content_type) > 40):
                            page_content_type = 'text/html;charset=utf-8'
                    template_values['page_content_type'] = page_content_type
                    # Verification: weight
                    page_weight = self.request.get('weight').strip()
                    if (len(page_weight) == 0):
                        page_weight = 0
                    else:
                        if (len(page_weight) > 9):
                            page_weight = 0
                        else:
                            try:
                                page_weight = int(page_weight)
                            except ValueError:
                                page_weight = 0
                    template_values['page_weight'] = page_weight
                    template_values['errors'] = errors
                    if (errors == 0):
                        page = Page(parent=minisite)
                        q = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'page.max')
                        if (q.count() == 1):
                            counter = q[0]
                            counter.value = counter.value + 1
                        else:
                            counter = Counter()
                            counter.name = 'page.max'
                            counter.value = 1
                        q2 = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'page.total')
                        if (q2.count() == 1):
                            counter2 = q2[0]
                            counter2.value = counter2.value + 1
                        else:
                            counter2 = Counter()
                            counter2.name = 'page.total'
                            counter2.value = 1
                        page.num = counter.value
                        page.name = page_name
                        page.title = page_t
                        page.content = page_content
                        if page_mode == 1:
                            # dynamic page: render the content as a Django template
                            from django.template import Context, Template
                            t = Template(page_content)
                            c = Context({"site": site, "minisite": page.minisite, "page": page})
                            output = t.render(c)
                            page.content_rendered = output
                        else:
                            page.content_rendered = page_content
                        page.content_type = page_content_type
                        page.weight = page_weight
                        page.mode = page_mode
                        page.minisite = minisite
                        page.put()
                        counter.put()
                        counter2.put()
                        minisite.pages = minisite.pages + 1
                        minisite.put()
                        memcache.delete('Minisite_' + str(minisite.num))
                        memcache.delete('Minisite::' + str(minisite.name))
                        self.redirect('/backstage/minisite/' + minisite.name)
                    else:
                        path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_new_page.html')
                        output = template.render(path, template_values)
                        self.response.out.write(output)
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstageRemoveMinisiteHandler(webapp.RequestHandler):
    def get(self, minisite_key):
        member = CheckAuth(self)
        if member:
            if member.level == 0:
                minisite = db.get(db.Key(minisite_key))
                if minisite:
                    # Delete all contents
                    pages = db.GqlQuery("SELECT * FROM Page WHERE minisite = :1", minisite)
                    for page in pages:
                        memcache.delete('Page_' + str(page.num))
                        memcache.delete('Page::' + str(page.name))
                        memcache.delete(minisite.name + '/' + page.name)
                        page.delete()
                    minisite.pages = 0
                    minisite.put()
                    # Delete the minisite
                    memcache.delete('Minisite_' + str(minisite.num))
                    memcache.delete('Minisite::' + str(minisite.name))
                    minisite.delete()
                    self.redirect('/backstage')
                else:
                    self.redirect('/backstage')
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstagePageHandler(webapp.RequestHandler):
    def get(self, page_key):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                page = db.get(db.Key(page_key))
                if page:
                    minisite = page.minisite
                    template_values['page'] = page
                    template_values['minisite'] = minisite
                    template_values['page_title'] = site.title + u' › ' + minisite.title + u' › ' + page.title + u' › Edit'
                    template_values['page_name'] = page.name
                    template_values['page_t'] = page.title
                    template_values['page_content'] = page.content
                    template_values['page_content_type'] = page.content_type
                    template_values['page_mode'] = page.mode
                    template_values['page_weight'] = page.weight
                    path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_page.html')
                    output = template.render(path, template_values)
                    self.response.out.write(output)
                else:
                    self.redirect('/backstage')
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')

    def post(self, page_key):
        site = GetSite()
        template_values = {}
        template_values['site'] = site
        template_values['system_version'] = SYSTEM_VERSION
        member = CheckAuth(self)
        template_values['member'] = member
        l10n = GetMessages(self, member, site)
        template_values['l10n'] = l10n
        if (member):
            if (member.level == 0):
                page = db.get(db.Key(page_key))
                if page:
                    minisite = page.minisite
                    template_values['minisite'] = minisite
                    template_values['page_title'] = site.title + u' › ' + minisite.title + u' › Add New Page'
                    errors = 0
                    # Verification: name
                    page_name_error = 0
                    page_name_error_messages = ['',
                                                u'Please enter a page name',
                                                u'Page name cannot exceed 64 characters',
                                                u'Page name can only contain a-Z, 0-9, ., - and _',
                                                u'Sorry, this page name already exists']
                    page_name = self.request.get('name').strip().lower()
                    if (len(page_name) == 0):
                        errors = errors + 1
                        page_name_error = 1
                    else:
                        if (len(page_name) > 64):
                            errors = errors + 1
                            page_name_error = 2
                        else:
                            if (re.search('^[a-zA-Z0-9\-\_\.]+$', page_name)):
                                q = db.GqlQuery('SELECT * FROM Page WHERE name = :1 AND minisite = :2', page_name.lower(), page.minisite)
                                if (q.count() > 0):
                                    if q[0].num != page.num:
                                        errors = errors + 1
                                        page_name_error = 4
                            else:
                                errors = errors + 1
                                page_name_error = 3
                    template_values['page_name'] = page_name
                    template_values['page_name_error'] = page_name_error
                    template_values['page_name_error_message'] = page_name_error_messages[page_name_error]
                    # Verification: title
                    page_t_error = 0
                    page_t_error_messages = ['',
                                             u'Please enter a page title',
                                             u'Page title cannot exceed 100 characters']
                    page_t = self.request.get('t').strip()
                    if (len(page_t) == 0):
                        errors = errors + 1
                        page_t_error = 1
                    else:
                        if (len(page_t) > 100):
                            errors = errors + 1
                            page_t_error = 2
                    template_values['page_t'] = page_t
                    template_values['page_t_error'] = page_t_error
                    template_values['page_t_error_message'] = page_t_error_messages[page_t_error]
                    # Verification: content
                    page_content_error = 0
                    page_content_error_messages = ['',
                                                   u'Please enter page content',
                                                   u'Page content cannot exceed 200000 characters']
                    page_content = self.request.get('content').strip()
                    if (len(page_content) == 0):
                        errors = errors + 1
                        page_content_error = 1
                    else:
                        if (len(page_content) > 200000):
                            errors = errors + 1
                            page_content_error = 2
                    template_values['page_content'] = page_content
                    template_values['page_content_error'] = page_content_error
                    template_values['page_content_error_message'] = page_content_error_messages[page_content_error]
                    # Verification: mode
                    page_mode = self.request.get('mode').strip()
                    if page_mode == '1':
                        page_mode = 1
                    else:
                        page_mode = 0
                    # Verification: content_type
                    page_content_type = self.request.get('content_type').strip()
                    if (len(page_content_type) == 0):
                        page_content_type = 'text/html;charset=utf-8'
                    else:
                        if (len(page_content_type) > 40):
                            page_content_type = 'text/html;charset=utf-8'
                    template_values['page_content_type'] = page_content_type
                    # Verification: weight
                    page_weight = self.request.get('weight').strip()
                    if (len(page_weight) == 0):
                        page_weight = 0
                    else:
                        if (len(page_weight) > 9):
                            page_weight = 0
                        else:
                            try:
                                page_weight = int(page_weight)
                            except ValueError:
                                page_weight = 0
                    template_values['page_weight'] = page_weight
                    template_values['errors'] = errors
                    if (errors == 0):
                        page.name = page_name
                        page.title = page_t
                        page.content = page_content
                        if page_mode == 1:
                            # dynamic page: render the content as a Django template
                            from django.template import Context, Template
                            t = Template(page_content)
                            c = Context({"site": site, "minisite": page.minisite, "page": page})
                            output = t.render(c)
                            page.content_rendered = output
                        else:
                            page.content_rendered = page_content
                        page.content_type = page_content_type
                        page.mode = page_mode
                        page.weight = page_weight
                        page.put()
                        memcache.delete('Page_' + str(page.num))
                        memcache.delete('Page::' + str(page.name))
                        memcache.delete(minisite.name + '/' + page.name)
                        self.redirect('/backstage/minisite/' + minisite.name)
                    else:
                        path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_page.html')
                        output = template.render(path, template_values)
                        self.response.out.write(output)
                else:
                    self.redirect('/backstage')
            else:
                self.redirect('/')
        else:
            self.redirect('/signin')


class BackstageRemovePageHandler(webapp.RequestHandler):
    def get(self, page_key):
        member = CheckAuth(self)
        if member:
            if member.level == 0:
                page = db.get(db.Key(page_key))
                if page:
                    memcache.delete('Page_' + str(page.num))
memcache.delete('Page::' + str(page.name)) memcache.delete(page.minisite.name + '/' + page.name) minisite = page.minisite page.delete() minisite.pages = minisite.pages - 1 minisite.put() self.redirect('/backstage/minisite/' + minisite.name) else: self.redirect('/') else: self.redirect('/signin') class BackstageNewSectionHandler(webapp.RequestHandler): def get(self): site = GetSite() template_values = {} template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) template_values['member'] = member l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_new_section.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') def post(self): site = GetSite() template_values = {} template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) template_values['member'] = member l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): errors = 0 # Verification: name section_name_error = 0 section_name_error_messages = ['', u'请输入区域名', u'区域名长度不能超过 32 个字符', u'区域名只能由 a-Z 0-9 及 - 和 _ 组成', u'抱歉这个区域名已经存在了'] section_name = self.request.get('name').strip().lower() if (len(section_name) == 0): errors = errors + 1 section_name_error = 1 else: if (len(section_name) > 32): errors = errors + 1 section_name_error = 2 else: if (re.search('^[a-zA-Z0-9\-\_]+$', section_name)): q = db.GqlQuery('SELECT __key__ FROM Section WHERE name = :1', section_name.lower()) if (q.count() > 0): errors = errors + 1 section_name_error = 4 else: errors = errors + 1 section_name_error = 3 template_values['section_name'] = section_name template_values['section_name_error'] = section_name_error template_values['section_name_error_message'] = section_name_error_messages[section_name_error] # Verification: title section_title_error = 0 section_title_error_messages = ['', u'请输入区域标题', u'区域标题长度不能超过 32 个字符' ] section_title = self.request.get('title').strip() if (len(section_title) == 0): errors = errors + 1 section_title_error = 1 else: if (len(section_title) > 32): errors = errors + 1 section_title_error = 2 template_values['section_title'] = section_title template_values['section_title_error'] = section_title_error template_values['section_title_error_message'] = section_title_error_messages[section_title_error] # Verification: title section_title_alternative_error = 0 section_title_alternative_error_messages = ['', u'请输入区域副标题', u'区域标题长度不能超过 32 个字符' ] section_title_alternative = self.request.get('title_alternative').strip() if (len(section_title_alternative) == 0): errors = errors + 1 section_title_alternative_error = 1 else: if (len(section_title_alternative) > 32): errors = errors + 1 section_title_alternative_error = 2 template_values['section_title_alternative'] = section_title_alternative template_values['section_title_alternative_error'] = section_title_alternative_error template_values['section_title_alternative_error_message'] = section_title_alternative_error_messages[section_title_alternative_error] template_values['errors'] = errors if (errors == 0): section = Section() q = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'section.max') if (q.count() == 1): counter = q[0] counter.value = counter.value + 1 else: counter = Counter() counter.name = 'section.max' counter.value = 1 
section.num = counter.value section.name = section_name section.title = section_title section.title_alternative = section_title_alternative section.put() counter.put() self.redirect('/backstage') else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_new_section.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') class BackstageSectionHandler(webapp.RequestHandler): def get(self, section_name): site = GetSite() browser = detect(self.request) template_values = {} template_values['rnd'] = random.randrange(1, 100) template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): template_values['member'] = member q = db.GqlQuery("SELECT * FROM Section WHERE name = :1", section_name) section = False if (q.count() == 1): section = q[0] template_values['section'] = section template_values['page_title'] = site.title + u' › 后台 › ' + section.title template_values['section_name'] = section.name template_values['section_title'] = section.title template_values['section_title_alternative'] = section.title_alternative if section.header: template_values['section_header'] = section.header else: template_values['section_header'] = '' if section.footer: template_values['section_footer'] = section.footer else: template_values['section_footer'] = '' else: template_values['section'] = section if (section): template_values['section'] = section q2 = db.GqlQuery("SELECT * FROM Node WHERE section_num = :1 ORDER BY last_modified DESC LIMIT 10", section.num) template_values['recent_modified'] = q2 else: template_values['nodes'] = False if browser['ios']: path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_section.html') else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_section.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') def post(self, section_name): site = GetSite() browser = detect(self.request) template_values = {} template_values['rnd'] = random.randrange(1, 100) template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['member'] = member section = GetKindByName('Section', section_name) if section is not False: template_values['section'] = section errors = 0 # Verification: name section_name_error = 0 section_name_error_messages = ['', u'请输入区域名', u'区域名长度不能超过 32 个字符', u'区域名只能由 a-Z 0-9 及 - 和 _ 组成', u'抱歉这个区域名已经存在了'] section_name = self.request.get('name').strip().lower() if (len(section_name) == 0): errors = errors + 1 section_name_error = 1 else: if (len(section_name) > 32): errors = errors + 1 section_name_error = 2 else: if (re.search('^[a-zA-Z0-9\-\_]+$', section_name)): q = db.GqlQuery('SELECT * FROM Section WHERE name = :1', section_name.lower()) if (q.count() > 0): for possible_conflict in q: if possible_conflict.num != section.num: errors = errors + 1 section_name_error = 4 else: errors = errors + 1 section_name_error = 3 template_values['section_name'] = section_name template_values['section_name_error'] = section_name_error template_values['section_name_error_message'] = section_name_error_messages[section_name_error] # 
Verification: title section_title_error = 0 section_title_error_messages = ['', u'请输入区域标题', u'区域标题长度不能超过 32 个字符' ] section_title = self.request.get('title').strip() if (len(section_title) == 0): errors = errors + 1 section_title_error = 1 else: if (len(section_title) > 32): errors = errors + 1 section_title_error = 2 template_values['section_title'] = section_title template_values['section_title_error'] = section_title_error template_values['section_title_error_message'] = section_title_error_messages[section_title_error] # Verification: title_alternative section_title_alternative_error = 0 section_title_alternative_error_messages = ['', u'请输入区域副标题', u'区域标题长度不能超过 32 个字符' ] section_title_alternative = self.request.get('title_alternative').strip() if (len(section_title_alternative) == 0): errors = errors + 1 section_title_alternative_error = 1 else: if (len(section_title_alternative) > 32): errors = errors + 1 section_title_alternative_error = 2 template_values['section_title_alternative'] = section_title_alternative template_values['section_title_alternative_error'] = section_title_alternative_error template_values['section_title_alternative_error_message'] = section_title_alternative_error_messages[section_title_alternative_error] # Verification: header section_header_error = 0 section_header_error_messages = ['', u'区域头部信息不能超过 1000 个字符' ] section_header = self.request.get('header').strip() if len(section_header) > 1000: errors = errors + 1 section_header_error = 1 template_values['section_header'] = section_header template_values['section_header_error'] = section_header_error template_values['section_header_error_message'] = section_header_error_messages[section_header_error] # Verification: footer section_footer_error = 0 section_footer_error_messages = ['', u'区域尾部信息不能超过 1000 个字符' ] section_footer = self.request.get('footer').strip() if len(section_footer) > 1000: errors = errors + 1 section_footer_error = 1 template_values['section_footer'] = section_footer template_values['section_footer_error'] = section_footer_error template_values['section_footer_error_message'] = section_footer_error_messages[section_footer_error] template_values['errors'] = errors if (errors == 0): memcache.delete('Section::' + section.name) section.name = section_name section.title = section_title section.title_alternative = section_title_alternative section.header = section_header section.footer = section_footer section.put() memcache.delete('Section_' + str(section.num)) memcache.delete('Section::' + section_name) self.redirect('/backstage') else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_section.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/backstage') else: self.redirect('/') else: self.redirect('/signin') class BackstageNewNodeHandler(webapp.RequestHandler): def get(self, section_name): site = GetSite() template_values = {} template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): template_values['member'] = CheckAuth(self) q = db.GqlQuery("SELECT * FROM Section WHERE name = :1", section_name) if (q.count() == 1): template_values['section'] = q[0] else: template_values['section'] = False path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_new_node.html') output = template.render(path, template_values) self.response.out.write(output) else: 
self.redirect('/') else: self.redirect('/signin') def post(self, section_name): site = GetSite() template_values = {} template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): template_values['member'] = member section = False q = db.GqlQuery("SELECT * FROM Section WHERE name = :1", section_name) if (q.count() == 1): section = q[0] template_values['section'] = section else: template_values['section'] = False errors = 0 # Verification: name node_name_error = 0 node_name_error_messages = ['', u'请输入节点名', u'节点名长度不能超过 32 个字符', u'节点名只能由 a-Z 0-9 及 - 和 _ 组成', u'抱歉这个节点名已经存在了'] node_name = self.request.get('name').strip().lower() if (len(node_name) == 0): errors = errors + 1 node_name_error = 1 else: if (len(node_name) > 32): errors = errors + 1 node_name_error = 2 else: if (re.search('^[a-zA-Z0-9\-\_]+$', node_name)): q = db.GqlQuery('SELECT __key__ FROM Node WHERE name = :1', node_name.lower()) if (q.count() > 0): errors = errors + 1 node_name_error = 4 else: errors = errors + 1 node_name_error = 3 template_values['node_name'] = node_name template_values['node_name_error'] = node_name_error template_values['node_name_error_message'] = node_name_error_messages[node_name_error] # Verification: title node_title_error = 0 node_title_error_messages = ['', u'请输入节点标题', u'节点标题长度不能超过 32 个字符' ] node_title = self.request.get('title').strip() if (len(node_title) == 0): errors = errors + 1 node_title_error = 1 else: if (len(node_title) > 32): errors = errors + 1 node_title_error = 2 template_values['node_title'] = node_title template_values['node_title_error'] = node_title_error template_values['node_title_error_message'] = node_title_error_messages[node_title_error] # Verification: title node_title_alternative_error = 0 node_title_alternative_error_messages = ['', u'请输入节点副标题', u'节点标题长度不能超过 32 个字符' ] node_title_alternative = self.request.get('title_alternative').strip() if (len(node_title_alternative) == 0): errors = errors + 1 node_title_alternative_error = 1 else: if (len(node_title_alternative) > 32): errors = errors + 1 node_title_alternative_error = 2 template_values['node_title_alternative'] = node_title_alternative template_values['node_title_alternative_error'] = node_title_alternative_error template_values['node_title_alternative_error_message'] = node_title_alternative_error_messages[node_title_alternative_error] template_values['errors'] = errors if (errors == 0): node = Node() q = db.GqlQuery('SELECT * FROM Counter WHERE name = :1', 'node.max') if (q.count() == 1): counter = q[0] counter.value = counter.value + 1 else: counter = Counter() counter.name = 'node.max' counter.value = 1 section.nodes = section.nodes + 1 node.num = counter.value node.section_num = section.num node.name = node_name node.title = node_title node.title_alternative = node_title_alternative node.put() section.put() counter.put() memcache.delete('index_categories') memcache.delete('home_nodes_new') self.redirect('/backstage/node/' + node.name) else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_new_node.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') class BackstageNodeHandler(webapp.RequestHandler): def get(self, node_name): site = GetSite() browser = detect(self.request) template_values = {} template_values['site'] = site 
template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): template_values['member'] = member q = db.GqlQuery("SELECT * FROM Node WHERE name = :1", node_name) if (q.count() == 1): node = q[0] if node.parent_node_name is None: siblings = [] else: siblings = db.GqlQuery("SELECT * FROM Node WHERE parent_node_name = :1 AND name != :2", node.parent_node_name, node.name) template_values['siblings'] = siblings template_values['node'] = node template_values['node_name'] = node.name template_values['node_title'] = node.title template_values['node_title_alternative'] = q[0].title_alternative if q[0].category is None: template_values['node_category'] = '' else: template_values['node_category'] = q[0].category if q[0].parent_node_name is None: template_values['node_parent_node_name'] = '' else: template_values['node_parent_node_name'] = q[0].parent_node_name if q[0].header is None: template_values['node_header'] = '' else: template_values['node_header'] = q[0].header if q[0].footer is None: template_values['node_footer'] = '' else: template_values['node_footer'] = q[0].footer if q[0].sidebar is None: template_values['node_sidebar'] = '' else: template_values['node_sidebar'] = q[0].sidebar if q[0].sidebar_ads is None: template_values['node_sidebar_ads'] = '' else: template_values['node_sidebar_ads'] = q[0].sidebar_ads template_values['node_topics'] = q[0].topics else: template_values['node'] = False section = GetKindByNum('Section', node.section_num) template_values['section'] = section if section is not False: template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + section.title + u' › ' + node.title if browser['ios']: path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_node.html') else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_node.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') def post(self, node_name): site = GetSite() browser = detect(self.request) template_values = {} template_values['site'] = site template_values['system_version'] = SYSTEM_VERSION member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if (member): if (member.level == 0): template_values['member'] = member node = False q = db.GqlQuery("SELECT * FROM Node WHERE name = :1", node_name) if (q.count() == 1): node = q[0] template_values['node'] = q[0] template_values['node_name'] = q[0].name template_values['node_title'] = q[0].title template_values['node_title_alternative'] = q[0].title_alternative if q[0].category is None: template_values['node_category'] = '' else: template_values['node_category'] = q[0].category if q[0].parent_node_name is None: template_values['node_parent_node_name'] = '' else: template_values['node_parent_node_name'] = q[0].parent_node_name if q[0].header is None: template_values['node_header'] = '' else: template_values['node_header'] = q[0].header if q[0].footer is None: template_values['node_footer'] = '' else: template_values['node_footer'] = q[0].footer if q[0].sidebar is None: template_values['node_sidebar'] = '' else: template_values['node_sidebar'] = q[0].sidebar if q[0].sidebar_ads is None: template_values['node_sidebar_ads'] = '' else: template_values['node_sidebar_ads'] = q[0].sidebar_ads template_values['node_topics'] = q[0].topics else: 
template_values['node'] = False section = False q2 = db.GqlQuery("SELECT * FROM Section WHERE num = :1", q[0].section_num) if (q2.count() == 1): section = q2[0] template_values['section'] = q2[0] else: template_values['section'] = False if section is not False: template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + section.title + u' › ' + node.title errors = 0 # Verification: name node_name_error = 0 node_name_error_messages = ['', u'请输入节点名', u'节点名长度不能超过 32 个字符', u'节点名只能由 a-Z 0-9 及 - 和 _ 组成', u'抱歉这个节点名已经存在了'] node_name = self.request.get('name').strip().lower() if (len(node_name) == 0): errors = errors + 1 node_name_error = 1 else: if (len(node_name) > 32): errors = errors + 1 node_name_error = 2 else: if (re.search('^[a-zA-Z0-9\-\_]+$', node_name)): q = db.GqlQuery('SELECT * FROM Node WHERE name = :1 AND num != :2', node_name.lower(), node.num) if (q.count() > 0): errors = errors + 1 node_name_error = 4 else: errors = errors + 1 node_name_error = 3 template_values['node_name'] = node_name template_values['node_name_error'] = node_name_error template_values['node_name_error_message'] = node_name_error_messages[node_name_error] # Verification: title node_title_error = 0 node_title_error_messages = ['', u'请输入节点标题', u'节点标题长度不能超过 32 个字符' ] node_title = self.request.get('title').strip() if (len(node_title) == 0): errors = errors + 1 node_title_error = 1 else: if (len(node_title) > 32): errors = errors + 1 node_title_error = 2 template_values['node_title'] = node_title template_values['node_title_error'] = node_title_error template_values['node_title_error_message'] = node_title_error_messages[node_title_error] # Verification: title_alternative node_title_alternative_error = 0 node_title_alternative_error_messages = ['', u'请输入节点副标题', u'节点标题长度不能超过 32 个字符' ] node_title_alternative = self.request.get('title_alternative').strip() if (len(node_title_alternative) == 0): errors = errors + 1 node_title_alternative_error = 1 else: if (len(node_title_alternative) > 32): errors = errors + 1 node_title_alternative_error = 2 template_values['node_title_alternative'] = node_title_alternative template_values['node_title_alternative_error'] = node_title_alternative_error template_values['node_title_alternative_error_message'] = node_title_alternative_error_messages[node_title_alternative_error] # Verification: node_category node_category = self.request.get('category').strip() template_values['node_category'] = node_category # Verification: node_parent_node_name node_parent_node_name = self.request.get('parent_node_name').strip() template_values['node_parent_node_name'] = node_parent_node_name # Verification: node_header node_header = self.request.get('header').strip() template_values['node_header'] = node_header # Verification: node_footer node_footer = self.request.get('footer').strip() template_values['node_footer'] = node_footer # Verification: node_sidebar node_sidebar = self.request.get('sidebar').strip() template_values['node_sidebar'] = node_sidebar # Verification: node_sidebar_ads node_sidebar_ads = self.request.get('sidebar_ads').strip() template_values['node_sidebar_ads'] = node_sidebar_ads template_values['errors'] = errors if (errors == 0): node.name = node_name node.title = node_title node.title_alternative = node_title_alternative node.category = node_category node.parent_node_name = node_parent_node_name node.header = node_header node.footer = node_footer node.sidebar = node_sidebar node.sidebar_ads = node_sidebar_ads node.put() memcache.delete('Node_' + 
str(node.num)) memcache.delete('Node::' + node.name) memcache.delete('index_categories') memcache.delete('home_nodes_new') self.redirect('/backstage/section/' + section.name) else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'mobile', 'backstage_node.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') class BackstageNodeAvatarHandler(BaseHandler): def get(self, node_name): self.redirect('/backstage/node/' + node_name) def post(self, node_name): if self.member: if self.member.level == 0: node = GetKindByName('Node', node_name) if node is None: return self.redirect('/backstage') dest = '/backstage/node/' + node.name timestamp = str(int(time.time())) try: avatar = self.request.get('avatar') except: return self.redirect(dest) if avatar is None: return self.redirect(dest) avatar_len = len(avatar) if avatar_len == 0: return self.redirect(dest) avatar_73 = images.resize(avatar, 73, 73) avatar_48 = images.resize(avatar, 48, 48) avatar_24 = images.resize(avatar, 24, 24) # Large 73x73 q1 = db.GqlQuery("SELECT * FROM Avatar WHERE name = :1", 'node_' + str(node.num) + '_large') if (q1.count() == 1): avatar_large = q1[0] avatar_large.content = db.Blob(avatar_73) avatar_large.put() else: qc1 = db.GqlQuery("SELECT * FROM Counter WHERE name = :1", 'avatar.max') if (qc1.count() == 1): counter1 = qc1[0] counter1.value = counter1.value + 1 else: counter1 = Counter() counter1.name = 'avatar.max' counter1.value = 1 counter1.put() avatar_large = Avatar() avatar_large.name = 'node_' + str(node.num) + '_large' avatar_large.content = db.Blob(avatar_73) avatar_large.num = counter1.value avatar_large.put() node.avatar_large_url = '/navatar/' + str(node.num) + '/large?r=' + timestamp node.put() # Normal 48x48 q2 = db.GqlQuery("SELECT * FROM Avatar WHERE name = :1", 'node_' + str(node.num) + '_normal') if (q2.count() == 1): avatar_normal = q2[0] avatar_normal.content = db.Blob(avatar_48) avatar_normal.put() else: qc2 = db.GqlQuery("SELECT * FROM Counter WHERE name = :1", 'avatar.max') if (qc2.count() == 1): counter2 = qc2[0] counter2.value = counter2.value + 1 else: counter2 = Counter() counter2.name = 'avatar.max' counter2.value = 1 counter2.put() avatar_normal = Avatar() avatar_normal.name = 'node_' + str(node.num) + '_normal' avatar_normal.content = db.Blob(avatar_48) avatar_normal.num = counter2.value avatar_normal.put() node.avatar_normal_url = '/navatar/' + str(node.num) + '/normal?r=' + timestamp node.put() # Mini 24x24 q3 = db.GqlQuery("SELECT * FROM Avatar WHERE name = :1", 'node_' + str(node.num) + '_mini') if (q3.count() == 1): avatar_mini = q3[0] avatar_mini.content = db.Blob(avatar_24) avatar_mini.put() else: qc3 = db.GqlQuery("SELECT * FROM Counter WHERE name = :1", 'avatar.max') if (qc3.count() == 1): counter3 = qc3[0] counter3.value = counter3.value + 1 else: counter3 = Counter() counter3.name = 'avatar.max' counter3.value = 1 counter3.put() avatar_mini = Avatar() avatar_mini.name = 'node_' + str(node.num) + '_mini' avatar_mini.content = db.Blob(avatar_24) avatar_mini.num = counter3.value avatar_mini.put() node.avatar_mini_url = '/navatar/' + str(node.num) + '/mini?r=' + timestamp node.put() # Upload to MobileMe use_this = False if config.mobileme_enabled and use_this: headers = {'Authorization' : 'Basic ' + base64.b64encode(config.mobileme_username + ':' + config.mobileme_password)} host = 'idisk.me.com' # Sharding timestamp = str(int(time.time())) shard = node.num % 31 root = '/' + 
config.mobileme_username + '/Web/Sites/v2ex/navatars/' + str(shard) root_mini = root + '/mini' root_normal = root + '/normal' root_large = root + '/large' h = httplib.HTTPConnection(host) # Mini h.request('PUT', root_mini + '/' + str(node.num) + '.png', str(avatar_24), headers) response = h.getresponse() if response.status == 201 or response.status == 204: node.avatar_mini_url = 'http://web.me.com/' + config.mobileme_username + '/v2ex/navatars/' + str(shard) + '/mini/' + str(node.num) + '.png?r=' + timestamp # Normal h.request('PUT', root_normal + '/' + str(node.num) + '.png', str(avatar_48), headers) response = h.getresponse() if response.status == 201 or response.status == 204: node.avatar_normal_url = 'http://web.me.com/' + config.mobileme_username + '/v2ex/navatars/' + str(shard) + '/normal/' + str(node.num) + '.png?r=' + timestamp # Large h.request('PUT', root_large + '/' + str(node.num) + '.png', str(avatar_73), headers) response = h.getresponse() if response.status == 201 or response.status == 204: node.avatar_large_url = 'http://web.me.com/' + config.mobileme_username + '/v2ex/navatars/' + str(shard) + '/large/' + str(node.num) + '.png?r=' + timestamp node.put() memcache.set('Node_' + str(node.num), node, 86400 * 14) memcache.set('Node::' + node.name, node, 86400 * 14) memcache.delete('Avatar::node_' + str(node.num) + '_large') memcache.delete('Avatar::node_' + str(node.num) + '_normal') memcache.delete('Avatar::node_' + str(node.num) + '_mini') #self.session['message'] = '新节点头像设置成功' self.redirect(dest) else: self.redirect('/') else: self.redirect('/signin') class BackstageRemoveReplyHandler(webapp.RequestHandler): def get(self, reply_key): member = CheckAuth(self) t = self.request.get('t') if (member): if (member.level == 0) and (str(member.created_ts) == str(t)): reply = db.get(db.Key(reply_key)) if reply: topic = reply.topic reply.delete() q = db.GqlQuery("SELECT __key__ FROM Reply WHERE topic = :1", topic) topic.replies = q.count() if (topic.replies == 0): topic.last_reply_by = None topic.put() pages = 1 memcache.delete('Topic_' + str(topic.num)) memcache.delete('topic_' + str(topic.num) + '_replies_desc_compressed') memcache.delete('topic_' + str(topic.num) + '_replies_asc_compressed') memcache.delete('topic_' + str(topic.num) + '_replies_filtered_compressed') memcache.delete('topic_' + str(topic.num) + '_replies_desc_rendered_desktop_' + str(pages)) memcache.delete('topic_' + str(topic.num) + '_replies_asc_rendered_desktop_' + str(pages)) memcache.delete('topic_' + str(topic.num) + '_replies_filtered_rendered_desktop_' + str(pages)) memcache.delete('topic_' + str(topic.num) + '_replies_desc_rendered_ios_' + str(pages)) memcache.delete('topic_' + str(topic.num) + '_replies_asc_rendered_ios_' + str(pages)) memcache.delete('topic_' + str(topic.num) + '_replies_filtered_rendered_ios_' + str(pages)) self.redirect('/t/' + str(topic.num)) else: self.redirect('/') else: self.redirect('/') else: self.redirect('/signin') class BackstageTidyReplyHandler(webapp.RequestHandler): def get(self, reply_num): member = CheckAuth(self) if (member): if (member.level == 0): q = db.GqlQuery("SELECT * FROM Reply WHERE num = :1", int(reply_num)) if (q.count() == 1): reply = q[0] topic_num = reply.topic_num q2 = db.GqlQuery("SELECT * FROM Member WHERE username_lower = :1", reply.created_by.lower()) member = q2[0] reply.member = member reply.member_num = member.num q3 = db.GqlQuery("SELECT * FROM Topic WHERE num = :1", topic_num) topic = q3[0] # Begin to do real stuff reply2 = Reply(parent=topic) 
reply2.num = reply.num reply2.content = reply.content reply2.topic = topic reply2.topic_num = topic.num reply2.member = reply.member reply2.member_num = reply.member_num reply2.created_by = reply.created_by reply2.source = reply.source reply2.created = reply.created reply2.last_modified = reply.last_modified reply2.put() reply.delete() self.redirect('/t/' + str(topic_num)) else: self.redirect('/') else: self.redirect('/') else: self.redirect('/signin') class BackstageTidyTopicHandler(webapp.RequestHandler): def get(self, topic_num): member = CheckAuth(self) if (member): if (member.level == 0): q = db.GqlQuery("SELECT * FROM Topic WHERE num = :1", int(topic_num)) if (q.count() == 1): topic = q[0] q2 = db.GqlQuery("SELECT * FROM Member WHERE num = :1", topic.member_num) member = q2[0] topic.member = member q3 = db.GqlQuery("SELECT * FROM Node WHERE num = :1", topic.node_num) node = q3[0] topic.node = node topic.put() memcache.delete('Topic_' + str(topic.num)) self.redirect('/t/' + str(topic.num)) else: self.redirect('/') else: self.redirect('/') else: self.redirect('/signin') class BackstageDeactivateUserHandler(webapp.RequestHandler): def get(self, key): member = CheckAuth(self) t = self.request.get('t') if member: if (member.level == 0) and (str(member.created_ts) == str(t)): one = db.get(db.Key(key)) if one: if one.num != 1: memcache.delete(one.auth) one.deactivated = int(time.time()) one.password = hashlib.sha1(str(time.time())).hexdigest() one.auth = hashlib.sha1(str(one.num) + ':' + one.password).hexdigest() one.newbie = 1 one.noob = 1 one.put() memcache.delete('Member_' + str(one.num)) return self.redirect('/member/' + one.username) return self.redirect('/') class BackstageMoveTopicHandler(webapp.RequestHandler): def get(self, key): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n topic = db.get(db.Key(key)) can_move = False ttl = 0 if member: if member.level == 0: can_move = True if topic: if topic.member_num == member.num: now = datetime.datetime.now() ttl = 300 - int((now - topic.created).seconds) if ttl > 0: can_move = True template_values['ttl'] = ttl template_values['can_move'] = can_move if member: template_values['member'] = member if can_move: template_values['page_title'] = site.title + u' › 移动主题' template_values['site'] = site if topic is not None: node = topic.node template_values['topic'] = topic template_values['node'] = node template_values['system_version'] = SYSTEM_VERSION themes = os.listdir(os.path.join(os.path.dirname(__file__), 'tpl', 'themes')) template_values['themes'] = themes path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_move_topic.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/') else: self.redirect('/signin') def post(self, key): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n topic = db.get(db.Key(key)) can_move = False ttl = 0 if member: if member.level == 0: can_move = True if topic: if topic.member_num == member.num: now = datetime.datetime.now() ttl = 300 - int((now - topic.created).seconds) if ttl > 0: can_move = True template_values['ttl'] = ttl template_values['can_move'] = can_move if member: template_values['member'] = member if can_move: template_values['page_title'] = site.title + u' › 移动主题' template_values['site'] = site if topic is not None: errors = 0 node = 
topic.node template_values['topic'] = topic template_values['node'] = node template_values['system_version'] = SYSTEM_VERSION destination = self.request.get('destination') if destination is not None: node_new = GetKindByName('Node', destination) if node_new is not False: node_new = db.get(node_new.key()) node_old = topic.node node_old.topics = node_old.topics - 1 node_old.put() node_new.topics = node_new.topics + 1 node_new.put() topic.node = node_new topic.node_num = node_new.num topic.node_name = node_new.name topic.node_title = node_new.title topic.put() memcache.delete('Topic_' + str(topic.num)) memcache.delete('Node_' + str(node_old.num)) memcache.delete('Node_' + str(node_new.num)) memcache.delete('Node::' + str(node_old.name)) memcache.delete('Node::' + str(node_new.name)) memcache.delete('q_latest_16') memcache.delete('home_rendered') memcache.delete('home_rendered_mobile') self.redirect('/t/' + str(topic.num)) else: errors = errors + 1 else: errors = errors + 1 if errors > 0: themes = os.listdir(os.path.join(os.path.dirname(__file__), 'tpl', 'themes')) template_values['themes'] = themes path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_move_topic.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') else: self.redirect('/') else: self.redirect('/signin') class BackstageSiteHandler(webapp.RequestHandler): def get(self): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['page_title'] = site.title + u' › 站点设置' template_values['site'] = site template_values['site_title'] = site.title template_values['site_slogan'] = site.slogan template_values['site_domain'] = site.domain template_values['site_description'] = site.description if site.home_categories is not None: template_values['site_home_categories'] = site.home_categories else: template_values['site_home_categories'] = '' if site.hostemail is not None: template_values['site_hostemail'] = site.hostemail else: template_values['site_hostemail'] = '' if site.analytics is not None: template_values['site_analytics'] = site.analytics else: template_values['site_analytics'] = '' if site.topic_view_level is not None: template_values['site_topic_view_level'] = site.topic_view_level else: template_values['site_topic_view_level'] = -1 if site.topic_create_level is not None: template_values['site_topic_create_level'] = site.topic_create_level else: template_values['site_topic_create_level'] = 1000 if site.topic_reply_level is not None: template_values['site_topic_reply_level'] = site.topic_reply_level else: template_values['site_topic_reply_level'] = 1000 if site.meta is not None: template_values['site_meta'] = site.meta else: template_values['site_meta'] = '' if site.home_top is not None: template_values['site_home_top'] = site.home_top else: template_values['site_home_top'] = '' if site.theme is not None: template_values['site_theme'] = site.theme else: template_values['site_theme'] = 'default' if site.data_migration_mode is not None: template_values['site_data_migration_mode'] = site.data_migration_mode else: template_values['site_data_migration_mode'] = 0 s = GetLanguageSelect(site.l10n) template_values['s'] = s template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION themes = os.listdir(os.path.join(os.path.dirname(__file__), 'tpl', 'themes')) template_values['themes'] = themes path = 
os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_site.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') def post(self): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['page_title'] = site.title + u' › 站点设置' template_values['site'] = site template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION errors = 0 # Verification: title (required) site_title_error = 0 site_title_error_messages = ['', u'请输入站点名', u'站点名长度不能超过 40 个字符' ] site_title = self.request.get('title').strip() if (len(site_title) == 0): errors = errors + 1 site_title_error = 1 else: if (len(site_title) > 40): errors = errors + 1 site_title_error = 1 template_values['site_title'] = site_title template_values['site_title_error'] = site_title_error template_values['site_title_error_message'] = site_title_error_messages[site_title_error] # Verification: slogan (required) site_slogan_error = 0 site_slogan_error_messages = ['', u'请输入站点标语', u'站点标语长度不能超过 140 个字符' ] site_slogan = self.request.get('slogan').strip() if (len(site_slogan) == 0): errors = errors + 1 site_slogan_error = 1 else: if (len(site_slogan) > 140): errors = errors + 1 site_slogan_error = 1 template_values['site_slogan'] = site_slogan template_values['site_slogan_error'] = site_slogan_error template_values['site_slogan_error_message'] = site_slogan_error_messages[site_slogan_error] # Verification: domain (required) site_domain_error = 0 site_domain_error_messages = ['', u'请输入主要域名', u'主要域名长度不能超过 40 个字符' ] site_domain = self.request.get('domain').strip() if (len(site_domain) == 0): errors = errors + 1 site_domain_error = 1 else: if (len(site_domain) > 40): errors = errors + 1 site_domain_error = 1 template_values['site_domain'] = site_domain template_values['site_domain_error'] = site_domain_error template_values['site_domain_error_message'] = site_domain_error_messages[site_domain_error] # Verification: description (required) site_description_error = 0 site_description_error_messages = ['', u'请输入站点简介', u'站点简介长度不能超过 200 个字符' ] site_description = self.request.get('description').strip() if (len(site_description) == 0): errors = errors + 1 site_description_error = 1 else: if (len(site_description) > 200): errors = errors + 1 site_description_error = 1 template_values['site_description'] = site_description template_values['site_description_error'] = site_description_error template_values['site_description_error_message'] = site_description_error_messages[site_description_error] # Verification: analytics (optional) site_analytics_error = 0 site_analytics_error_messages = ['', u'Analytics ID 格式不正确' ] site_analytics = self.request.get('analytics').strip() if len(site_analytics) > 0: if re.findall('^UA\-[0-9]+\-[0-9]+$', site_analytics): site_analytics_error = 0 else: errors = errors + 1 site_analytics_error = 1 else: site_analytics = '' template_values['site_analytics'] = site_analytics template_values['site_analytics_error'] = site_analytics_error template_values['site_analytics_error_message'] = site_analytics_error_messages[site_analytics_error] site_hostemail = self.request.get('hostemail').strip() template_values['site_hostemail'] = site_hostemail # Verification: l10n (required) site_l10n = self.request.get('l10n').strip() supported = GetSupportedLanguages() if site_l10n == '': site_l10n = site.l10n else: if site_l10n not in supported: 
site_l10n = site.l10n s = GetLanguageSelect(site_l10n) template_values['s'] = s template_values['site_l10n'] = site_l10n # Verification: home_categories (optional) site_home_categories_error = 0 site_home_categories_error_messages = ['', u'首页分类信息不要超过 2000 个字符' ] site_home_categories = self.request.get('home_categories').strip() site_home_categories_length = len(site_home_categories) if len(site_home_categories) > 0: if site_home_categories_length > 2000: errors = errors + 1 site_home_categories_error = 1 else: site_home_categories = '' template_values['site_home_categories'] = site_home_categories template_values['site_home_categories_error'] = site_home_categories_error template_values['site_home_categories_error_message'] = site_home_categories_error_messages[site_home_categories_error] # Verification: topic_view_level (default=-1) site_topic_view_level = self.request.get('topic_view_level') try: site_topic_view_level = int(site_topic_view_level) if site_topic_view_level < -1: site_topic_view_level = -1 except: site_topic_view_level = -1 template_values['site_topic_view_level'] = site_topic_view_level # Verification: topic_create_level (default=1000) site_topic_create_level = self.request.get('topic_create_level') try: site_topic_create_level = int(site_topic_create_level) if site_topic_create_level < -1: site_topic_create_level = 1000 except: site_topic_create_level = 1000 template_values['site_topic_create_level'] = site_topic_create_level # Verification: topic_reply_level (default=1000) site_topic_reply_level = self.request.get('topic_reply_level') try: site_topic_reply_level = int(site_topic_reply_level) if site_topic_reply_level < -1: site_topic_reply_level = 1000 except: site_topic_reply_level = 1000 template_values['site_topic_reply_level'] = site_topic_reply_level # Verification: meta site_meta = self.request.get('meta') template_values['site_meta'] = site_meta # Verification: home_top site_home_top = self.request.get('home_top') template_values['site_home_top'] = site_home_top # Verification: theme site_theme = self.request.get('theme') themes = os.listdir(os.path.join(os.path.dirname(__file__), 'tpl', 'themes')) template_values['themes'] = themes if site_theme in themes: template_values['site_theme'] = site_theme else: site_theme = 'default' template_values['site_theme'] = site_theme # Verification: data_migration_mode site_data_migration_mode = self.request.get('data_migration_mode') if site_data_migration_mode == 'on': template_values['site_data_migration_mode'] = 1 else: template_values['site_data_migration_mode'] = 0 template_values['errors'] = errors if errors == 0: site.title = site_title site.slogan = site_slogan site.domain = site_domain site.description = site_description if site_home_categories != '': site.home_categories = site_home_categories if site_analytics != '': site.analytics = site_analytics site.hostemail = site_hostemail site.l10n = site_l10n site.topic_view_level = site_topic_view_level site.topic_create_level = site_topic_create_level site.topic_reply_level = site_topic_reply_level site.meta = site_meta site.home_top = site_home_top site.theme = site_theme site.data_migration_mode = template_values['site_data_migration_mode'] site.put() memcache.delete('index_categories') template_values['message'] = l10n.site_settings_updated; template_values['site'] = site memcache.delete('site') path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_site.html') output = template.render(path, template_values) self.response.out.write(output) else: 
self.redirect('/') class BackstageTopicHandler(webapp.RequestHandler): def get(self): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + l10n.topic_settings.decode('utf-8') template_values['site'] = site template_values['site_use_topic_types'] = site.use_topic_types if site.topic_types is None: template_values['site_topic_types'] = '' else: template_values['site_topic_types'] = site.topic_types if site.use_topic_types is not True: s = '<select name="use_topic_types"><option value="1">Enabled</option><option value="0" selected="selected">Disabled</option></select>' else: s = '<select name="use_topic_types"><option value="1" selected="selected">Enabled</option><option value="0">Disabled</option></select>' template_values['s'] = s template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_topic.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') def post(self): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + l10n.topic_settings.decode('utf-8') template_values['site'] = site template_values['site_use_topic_types'] = site.use_topic_types if site.topic_types is None: template_values['site_topic_types'] = '' else: template_values['site_topic_types'] = site.topic_types if site.use_topic_types is not True: s = '<select name="use_topic_types"><option value="1">Enabled</option><option value="0" selected="selected">Disabled</option></select>' else: s = '<select name="use_topic_types"><option value="1" selected="selected">Enabled</option><option value="0">Disabled</option></select>' template_values['s'] = s template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION errors = 0 # Verification: use_topic_types site_use_topic_types = self.request.get('use_topic_types').strip() if site_use_topic_types is None: s = '<select name="use_topic_types"><option value="1">Enabled</option><option value="0" selected="selected">Disabled</option></select>' else: if site_use_topic_types == '1': s = '<select name="use_topic_types"><option value="1" selected="selected">Enabled</option><option value="0">Disabled</option></select>' else: s = '<select name="use_topic_types"><option value="1">Enabled</option><option value="0" selected="selected">Disabled</option></select>' template_values['s'] = s # Verification: topic_types site_topic_types = self.request.get('topic_types').strip() if errors == 0: if site_use_topic_types == '1': site.use_topic_types = True else: site.use_topic_types = False site.topic_types = site_topic_types site.put() memcache.delete('site') self.redirect('/backstage') else: path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_topic.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/') class BackstageRemoveMemcacheHandler(webapp.RequestHandler): def post(self): member = CheckAuth(self) if member: if member.level == 0: mc = self.request.get('mc') if mc is not None: memcache.delete(mc) self.redirect('/backstage') class 
BackstageMemberHandler(webapp.RequestHandler): def get(self, member_username): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: member_username_lower = member_username.lower() q = db.GqlQuery("SELECT * FROM Member WHERE username_lower = :1", member_username_lower) if (q.count() == 1): one = q[0] template_values['one'] = one errors = 0 template_values['one_username'] = one.username template_values['one_email'] = one.email if one.avatar_large_url is None: template_values['one_avatar_large_url'] = '' else: template_values['one_avatar_large_url'] = one.avatar_large_url if one.avatar_normal_url is None: template_values['one_avatar_normal_url'] = '' else: template_values['one_avatar_normal_url'] = one.avatar_normal_url if one.avatar_mini_url is None: template_values['one_avatar_mini_url'] = '' else: template_values['one_avatar_mini_url'] = one.avatar_mini_url if one.bio is None: template_values['one_bio'] = '' else: template_values['one_bio'] = one.bio template_values['one_level'] = one.level template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + one.username template_values['site'] = site template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION template_values['latest_members'] = db.GqlQuery("SELECT * FROM Member ORDER BY created DESC LIMIT 5") path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_member.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/backstage') else: self.redirect('/') def post(self, member_username): template_values = {} site = GetSite() member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: member_username_lower = member_username.lower() q = db.GqlQuery("SELECT * FROM Member WHERE username_lower = :1", member_username_lower) if (q.count() == 1): one = q[0] template_values['one'] = one errors = 0 # Verification: username one_username_error = 0 one_username_error_messages = ['', l10n.username_empty, l10n.username_too_long, l10n.username_too_short, l10n.username_invalid, l10n.username_taken] one_username = self.request.get('username').strip() if (len(one_username) == 0): errors = errors + 1 one_username_error = 1 else: if (len(one_username) > 32): errors = errors + 1 one_username_error = 2 else: if (len(one_username) < 3): errors = errors + 1 one_username_error = 3 else: if (re.search('^[a-zA-Z0-9\_]+$', one_username)): q = db.GqlQuery('SELECT * FROM Member WHERE username_lower = :1 AND num != :2', one_username.lower(), one.num) if (q.count() > 0): errors = errors + 1 one_username_error = 5 else: errors = errors + 1 one_username_error = 4 template_values['one_username'] = one_username template_values['one_username_error'] = one_username_error template_values['one_username_error_message'] = one_username_error_messages[one_username_error] # Verification: email one_email_error = 0 one_email_error_messages = ['', u'请输入电子邮件地址', u'电子邮件地址长度不能超过 32 个字符', u'输入的电子邮件地址不符合规则', u'这个电子邮件地址已经有人注册过了'] one_email = self.request.get('email').strip() if (len(one_email) == 0): errors = errors + 1 one_email_error = 1 else: if (len(one_email) > 32): errors = errors + 1 one_email_error = 2 else: p = re.compile(r"(?:^|\s)[-a-z0-9_.]+@(?:[-a-z0-9]+\.)+[a-z]{2,6}(?:\s|$)", re.IGNORECASE) if (p.search(one_email)): q = db.GqlQuery('SELECT * FROM Member WHERE email = :1 
AND num != :2', one_email.lower(), one.num) if (q.count() > 0): errors = errors + 1 one_email_error = 4 else: errors = errors + 1 one_email_error = 3 template_values['one_email'] = one_email.lower() template_values['one_email_error'] = one_email_error template_values['one_email_error_message'] = one_email_error_messages[one_email_error] # Verification: avatar one_avatar_large_url = self.request.get('avatar_large_url') template_values['one_avatar_large_url'] = one_avatar_large_url one_avatar_normal_url = self.request.get('avatar_normal_url') template_values['one_avatar_normal_url'] = one_avatar_normal_url one_avatar_mini_url = self.request.get('avatar_mini_url') template_values['one_avatar_mini_url'] = one_avatar_mini_url # Verification: bio one_bio = self.request.get('bio') template_values['one_bio'] = one_bio # Verification: level one_level = self.request.get('level') try: one_level = int(one_level) except: if one.num == 1: one_level = 0 else: one_level = 1000 template_values['one_level'] = one_level if errors == 0: one.username = one_username one.username_lower = one_username.lower() one.email = one_email one.avatar_large_url = one_avatar_large_url one.avatar_normal_url = one_avatar_normal_url one.avatar_mini_url = one_avatar_mini_url one.bio = one_bio one.level = one_level one.put() memcache.delete('Member_' + str(one.num)) memcache.delete('Member::' + one_username.lower()) self.redirect('/backstage') else: template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › ' + one.username template_values['site'] = site template_values['member'] = member template_values['system_version'] = SYSTEM_VERSION template_values['latest_members'] = db.GqlQuery("SELECT * FROM Member ORDER BY created DESC LIMIT 5") path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_member.html') output = template.render(path, template_values) self.response.out.write(output) else: self.redirect('/backstage') else: self.redirect('/') class BackstageMembersHandler(webapp.RequestHandler): def get(self): template_values = {} site = GetSite() template_values['site'] = site member = CheckAuth(self) l10n = GetMessages(self, member, site) template_values['l10n'] = l10n if member: if member.level == 0: template_values['member'] = member template_values['page_title'] = site.title + u' › ' + l10n.backstage.decode('utf-8') + u' › 浏览所有会员' member_total = memcache.get('member_total') if member_total is None: q3 = db.GqlQuery("SELECT * FROM Counter WHERE name = 'member.total'") if (q3.count() > 0): member_total = q3[0].value else: member_total = 0 memcache.set('member_total', member_total, 600) template_values['member_total'] = member_total page_size = 60 pages = 1 if member_total > page_size: if (member_total % page_size) > 0: pages = int(math.floor(member_total / page_size)) + 1 else: pages = int(math.floor(member_total / page_size)) try: page_current = int(self.request.get('p')) if page_current < 1: page_current = 1 if page_current > pages: page_current = pages except: page_current = 1 page_start = (page_current - 1) * page_size template_values['pages'] = pages template_values['page_current'] = page_current i = 1 ps = [] while i <= pages: ps.append(i) i = i + 1 template_values['ps'] = ps q = db.GqlQuery("SELECT * FROM Member ORDER BY created DESC LIMIT " + str(page_start )+ "," + str(page_size)) template_values['members'] = q path = os.path.join(os.path.dirname(__file__), 'tpl', 'desktop', 'backstage_members.html') output = template.render(path, template_values) 
self.response.out.write(output) else: self.redirect('/') else: self.redirect('/signin') class BackstageRemoveNotificationHandler(BaseHandler): def get(self, key): o = db.get(db.Key(key)) if o and self.member: if type(o).__name__ == 'Notification': if o.for_member_num == self.member.num: o.delete() memcache.delete('nn::' + self.member.username_lower) self.redirect('/notifications') def main(): application = webapp.WSGIApplication([ ('/backstage', BackstageHomeHandler), ('/backstage/new/minisite', BackstageNewMinisiteHandler), ('/backstage/minisite/(.*)', BackstageMinisiteHandler), ('/backstage/remove/minisite/(.*)', BackstageRemoveMinisiteHandler), ('/backstage/new/page/(.*)', BackstageNewPageHandler), ('/backstage/page/(.*)', BackstagePageHandler), ('/backstage/remove/page/(.*)', BackstageRemovePageHandler), ('/backstage/new/section', BackstageNewSectionHandler), ('/backstage/section/(.*)', BackstageSectionHandler), ('/backstage/new/node/(.*)', BackstageNewNodeHandler), ('/backstage/node/([a-z0-9A-Z]+)', BackstageNodeHandler), ('/backstage/node/([a-z0-9A-Z]+)/avatar', BackstageNodeAvatarHandler), ('/backstage/remove/reply/(.*)', BackstageRemoveReplyHandler), ('/backstage/tidy/reply/([0-9]+)', BackstageTidyReplyHandler), ('/backstage/tidy/topic/([0-9]+)', BackstageTidyTopicHandler), ('/backstage/deactivate/user/(.*)', BackstageDeactivateUserHandler), ('/backstage/move/topic/(.*)', BackstageMoveTopicHandler), ('/backstage/site', BackstageSiteHandler), ('/backstage/topic', BackstageTopicHandler), ('/backstage/remove/mc', BackstageRemoveMemcacheHandler), ('/backstage/member/(.*)', BackstageMemberHandler), ('/backstage/members', BackstageMembersHandler), ('/backstage/remove/notification/(.*)', BackstageRemoveNotificationHandler), ], debug=True) util.run_wsgi_app(application) if __name__ == '__main__': main()
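# Illustrative sketch, not called by the handlers above: the floor()+1
# branching in BackstageMembersHandler.get() is just a ceiling division
# clamped to at least one page, with the requested page clamped into range.
# Restated as a standalone helper for clarity.
def _paginate_sketch(total, page_size, requested_page):
    pages = max(1, -(-total // page_size))  # ceil(total / page_size), min 1
    try:
        current = int(requested_page)
    except (TypeError, ValueError):
        current = 1
    current = min(max(current, 1), pages)   # clamp into [1, pages]
    offset = (current - 1) * page_size      # offset used in "LIMIT offset,count"
    return pages, current, offset

# _paginate_sketch(121, 60, '3')   -->  (3, 3, 120)
# _paginate_sketch(120, 60, 99)    -->  (2, 2, 60)
# _paginate_sketch(0, 60, None)    -->  (1, 1, 0)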
{ "content_hash": "de461f504032942e04ef19043fb065ad", "timestamp": "", "source": "github", "line_count": 2328, "max_line_length": 182, "avg_line_length": 49.19115120274914, "alnum_prop": 0.4665857470943179, "repo_name": "woshilaowang/v2ex", "id": "2ad687421c7f978ecf1aecd6ea7fb9d5c762b9f5", "size": "116223", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "backstage.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "87390" }, { "name": "HTML", "bytes": "375999" }, { "name": "JavaScript", "bytes": "17553" }, { "name": "Python", "bytes": "1415660" } ], "symlink_target": "" }
import math

from .adjacency import adj_list_with_edges
from ..ds import PriorityQueue


class Vertex:
    def __init__(self, index, dist):
        self.index = index
        self.dist = dist


def vertex_comp(vertex1, vertex2):
    return vertex1.dist < vertex2.dist


def dijkstra(vertices, source, target, weighted_edges, visual=False):
    """
    Finds the shortest path from a single source to a destination (SSSP)

    Args:
        vertices: The number of vertices (should be numbered 0 to n - 1 / 1 to n)
        source: The index of the source (starting) vertex
        target: The index of the destination vertex
        weighted_edges: formatted, weighted edges of the graph
        visual: Set to True for additional screen output

    Returns:
        The shortest distance from source to target

    Time: O((E + V) log V) with a binary-heap priority queue

    Explanation:
    1) Set the tentative distance of the source to 0 and of every other
       vertex to infinity
    2) Pop the unvisited vertex with the smallest tentative distance from
       the priority queue
    3) Relax its outgoing edges: whenever going through the popped vertex
       shortens the path to a neighbour, update that neighbour's distance
       and push it onto the queue
    4) Repeat Steps 2-3 until the target is popped, at which point its
       distance is final
    """
    alist = adj_list_with_edges(vertices, weighted_edges)
    dist = [math.inf] * (vertices + 1)
    dist[source] = 0
    visited = [False] * (vertices + 1)
    pq = PriorityQueue(vertex_comp)
    pq.insert(Vertex(source, dist[source]))
    if visual:
        print("Performing Dijkstra's algorithm...\n-----")
        step = 1
    while not pq.is_empty():
        current_vertex = pq.pop()
        if visited[current_vertex.index]:
            continue
        visited[current_vertex.index] = True
        if visual:
            visual_string = "Step {}\t".format(step)
            step += 1
            for v in range(vertices):
                visual_string += "\td[{}]".format(v)
            print(visual_string)
            visual_string = "\t\t"
            for v in range(vertices):
                visual_string += "{}\t".format(dist[v])
            print(visual_string)
        if current_vertex.index == target:
            break
        for e in alist[current_vertex.index]:
            if dist[current_vertex.index] + e.weight < dist[e.end] and not visited[e.end]:
                dist[e.end] = dist[current_vertex.index] + e.weight
                pq.insert(Vertex(e.end, dist[e.end]))
    if visual:
        print("-----\nDistance from Vertex {} to {}:\t {}".format(source, target, dist[target]))
    return dist[target]
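# Standard-library cross-check for dijkstra() above. It assumes plain
# (start, end, weight) tuples and 0-based vertex numbering instead of the
# edge objects produced by adj_list_with_edges, so it is an illustrative
# reference implementation rather than part of the package API.
import heapq


def dijkstra_heapq(vertices, source, target, weighted_edges):
    """Reference Dijkstra using heapq with lazy deletion of stale entries."""
    adj = [[] for _ in range(vertices)]
    for u, v, w in weighted_edges:
        adj[u].append((v, w))
    dist = [math.inf] * vertices
    dist[source] = 0
    heap = [(0, source)]
    while heap:
        d, u = heapq.heappop(heap)
        if d > dist[u]:
            continue  # stale queue entry; a cheaper path was already found
        if u == target:
            return d
        for v, w in adj[u]:
            if d + w < dist[v]:
                dist[v] = d + w
                heapq.heappush(heap, (dist[v], v))
    return dist[target]

# Example: the path 0 -> 1 -> 2 (cost 3) beats the direct edge 0 -> 2 (cost 5):
#   dijkstra_heapq(3, 0, 2, [(0, 1, 1), (1, 2, 2), (0, 2, 5)])  -->  3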
{ "content_hash": "93abc78daaa6af67eacc971a4258c0ec", "timestamp": "", "source": "github", "line_count": 78, "max_line_length": 90, "avg_line_length": 27.205128205128204, "alnum_prop": 0.6856738925541942, "repo_name": "aryrobocode/apothecary", "id": "2c0c70f8afc804432f4f7bb27a35b3bc65dc3bcc", "size": "2122", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "apothecary/cs/graphs/dijkstra.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "47836" } ], "symlink_target": "" }
while True: pass
{ "content_hash": "c9a4451ff7c3259a28653fafe73525b3", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 11, "avg_line_length": 10.5, "alnum_prop": 0.6190476190476191, "repo_name": "kadashu/satori", "id": "52492121bddc23c51f323a20bc9f67c8bcfcd9b4", "size": "40", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "satori-rules/plugin/evil/10_cpuhog.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "189613" }, { "name": "Clojure", "bytes": "52043" }, { "name": "Go", "bytes": "102479" }, { "name": "HTML", "bytes": "10254" }, { "name": "JavaScript", "bytes": "16585" }, { "name": "Python", "bytes": "4195260" }, { "name": "Ruby", "bytes": "2312" }, { "name": "Shell", "bytes": "18923" }, { "name": "Smarty", "bytes": "4807" } ], "symlink_target": "" }
'''
Metadata driver for Digital Globe imagery

B{Format specification}:
    - U{http://www.digitalglobe.com/downloads/Imagery_Support_Data_Documentation.pdf}
'''

format_regex=[r'-[0-9]*_[0-9]{2,2}_[A-Z][0-9]{3,3}.*\.imd$',
              r'-[0-9]*_[0-9]{2,2}_[A-Z][0-9]{3,3}.*\.tif$',
              r'-[0-9]*_[0-9]{2,2}_[A-Z][0-9]{3,3}.*\.img$',
              r'-[0-9]*_[0-9]{2,2}_[A-Z][0-9]{3,3}.*\.ntf$']#Digital Globe
'''Regular expression list of file formats'''

#import base dataset modules
import __dataset__
import __default__

# import other modules (use "_" prefix to import privately)
import sys, os, re, glob, time, math, string
from metageta import utilities, geometry

try:
    from osgeo import gdal
    from osgeo import gdalconst
    from osgeo import osr
    from osgeo import ogr
except ImportError:
    import gdal
    import gdalconst
    import osr
    import ogr
gdal.AllRegister()

class Dataset(__default__.Dataset):
    '''Subclass of __default__.Dataset class so we get a load of metadata populated automatically'''
    def __init__(self,f):
        if f[:4]=='/vsi':raise NotImplementedError
        self.filelist=glob.glob(os.path.dirname(f)+'/*')
        if os.path.splitext(f)[1].lower() != '.imd': #splitext() keeps the leading dot
            imd=glob.glob(os.path.splitext(f)[0]+'.[Ii][Mm][Dd]')
            if imd:
                self.__setfileinfo__(imd[0])
            else:raise NotImplementedError, 'No matching IMD file'
        self.exts={'.tif':'GTiff/GeoTIFF','.img':'HFA/Erdas Imagine Images (.img)','.ntf':'NITF/National Imagery Transmission Format (.ntf)','.pix':'PCI Geomatics Database File (.pix)'}
        self.til=False
        self.img=False
        btil,self.til = utilities.exists(os.path.splitext(f)[0]+'.til',True)
        if not btil:
            for ext in self.exts:
                bext,ext = utilities.exists(os.path.splitext(f)[0]+ext,True)
                if bext:
                    self.img=ext
                    break
            if not self.img:raise NotImplementedError, 'Matching DigitalGlobe imagery file not found:\n%s' % f

    def __getmetadata__(self):
        '''Read Metadata for a Digital Globe format image as GDAL doesn't quite get it all...
@todo: Fix QB GDA94 Geographic CS "Unknown datum" problem ''' f=self.fileinfo['filepath'] imddata=self.__getimddata__(f) if self.til: vrt=self.__gettilevrt__(self.til,imddata) __default__.Dataset.__getmetadata__(self, vrt) for tmp in self.filelist: for ext in self.exts: if tmp[-4:].lower()==ext: self.metadata['filetype']=self.exts[ext] break else: __default__.Dataset.__getmetadata__(self, self.img) self.metadata['metadata']=open(f).read() if imddata.has_key('IMAGE_1'):imgkey='IMAGE_1' else:imgkey='SINGLE_IMAGE_PRODUCT' if imddata.has_key('MAP_PROJECTED_PRODUCT'): imgdate1=imddata['MAP_PROJECTED_PRODUCT']['earliestAcqTime'][0:19]#Already in ISO 8601 format, just strip off millisecs imgdate2=imddata['MAP_PROJECTED_PRODUCT']['latestAcqTime'][0:19] if imgdate1==imgdate2:self.metadata['imgdate']='%s'%(imgdate1) else:self.metadata['imgdate']='%s/%s'%(imgdate1,imgdate2) elif imddata[imgkey].has_key('firstLineTime'): self.metadata['imgdate']=imddata[imgkey]['firstLineTime'][0:19] #Already in ISO 8601 format, just strip off millisecs if imddata[imgkey]['satId']=='QB02': self.metadata['satellite']='Quickbird (QB02)' elif imddata[imgkey]['satId']=='WV01': self.metadata['satellite']='Worldview-1 (WV01)' elif imddata[imgkey]['satId']=='WV02': self.metadata['satellite']='Worldview-2 (WV02)' else: self.metadata['satellite']=imddata[imgkey]['satId'] if imddata['bandId'] == 'P':self.metadata['sensor']='PANCHROMATIC' else: if imddata['panSharpenAlgorithm']== 'None':self.metadata['sensor']='MULTISPECTRAL' else:self.metadata['sensor']='MULTI/PAN' #if imddata['bandId']=='Multi': # if imddata['nbands'] == 3:self.metadata['bands'] = 'B,G,R' # elif imddata['nbands'] == 4:self.metadata['bands'] = 'B,G,R,N' #else: #'BGRN','RGB','P' # self.metadata['bands'] = ','.join([l for l in imddata['bandId']]) self.metadata['bands'] = ','.join([b.split('_')[1] for b in imddata.keys() if b[0:5]=='BAND_']) if imddata[imgkey].has_key('meanSatEl'): self.metadata['satelevation'] = imddata[imgkey]['meanSatEl'] self.metadata['satazimuth'] = imddata[imgkey]['meanSatAz'] elif imddata[imgkey].has_key('satEl'): self.metadata['satelevation'] = imddata[imgkey]['satEl'] self.metadata['satazimuth'] = imddata[imgkey]['satAz'] if imddata[imgkey].has_key('meanSunEl'): self.metadata['sunelevation'] = imddata[imgkey]['meanSunEl'] self.metadata['sunazimuth'] = imddata[imgkey]['meanSunAz'] elif imddata[imgkey].has_key('sunEl'): self.metadata['sunelevation'] = imddata[imgkey]['sunEl'] self.metadata['sunazimuth'] = imddata[imgkey]['sunAz'] self.metadata['level'] = imddata['productLevel'] if imddata[imgkey].has_key('cloudCover'): self.metadata['cloudcover'] = imddata[imgkey]['cloudCover'] elif imddata[imgkey].has_key('manualCloudCover'): self.metadata['cloudcover'] = max([0, imddata[imgkey]['manualCloudCover']]) #hack for -999 cloud cover elif imddata[imgkey].has_key('autoCloudCover'): self.metadata['cloudcover'] = max([0, imddata[imgkey]['autoCloudCover']]) if imddata[imgkey].has_key('offNadirViewAngle'): self.metadata['viewangle'] = imddata[imgkey]['offNadirViewAngle'] elif imddata[imgkey].has_key('meanOffNadirViewAngle'): self.metadata['viewangle'] = imddata[imgkey]['meanOffNadirViewAngle'] if imddata[imgkey].has_key('CatId'): self.metadata['sceneid'] = imddata[imgkey]['CatId'] if imddata[imgkey].has_key('resamplingKernel'): self.metadata['resampling'] = imddata[imgkey]['resamplingKernel'] elif imddata.has_key('MAP_PROJECTED_PRODUCT') and imddata['MAP_PROJECTED_PRODUCT'].has_key('resamplingKernel'): self.metadata['resampling'] = 
imddata['MAP_PROJECTED_PRODUCT']['resamplingKernel'] if imddata.has_key('MAP_PROJECTED_PRODUCT') and imddata['MAP_PROJECTED_PRODUCT'].has_key('DEMCorrection'): self.metadata['demcorrection'] = imddata['MAP_PROJECTED_PRODUCT']['DEMCorrection'] #self.extent is set in __default__.Dataset.__getmetadata__() def __gettilevrt__(self,f,imddata): til=iter(open(f).readlines()) tileinfo={} datasets={} line=til.next() while line: #Extract all keys and values from the header file into a dictionary line=line.strip().strip(';').replace('"','') if line == 'END':break if 'BEGIN_GROUP' in line: line=til.next() while line: line=line.strip().strip(';').replace('"','') if 'END_GROUP' in line:break else: dat=map(string.strip, line.split('=',1)) if not dat[0] in datasets:datasets[dat[0]]=[] datasets[dat[0]].append(dat[1]) line=til.next() else: var=map(string.strip, line.split('=',1)) tileinfo[var[0]]=var[1] line=til.next() curdir=os.path.dirname(f) bimg,img=utilities.exists(os.path.join(curdir,datasets['filename'][0]),True) ds=geometry.OpenDataset(img) rb=ds.GetRasterBand(1) DataType=gdal.GetDataTypeName(rb.DataType) GeoTransform=ds.GetGeoTransform() Projection=ds.GetProjection() if GeoTransform==(0.0, 1.0, 0.0, 0.0, 0.0, 1.0): GeoTransform=gdal.GCPsToGeoTransform(ds.GetGCPs()) if Projection=='': Projection=ds.GetGCPProjection() GeoTransform=','.join(map(str, GeoTransform)) numTiles=int(tileinfo['numTiles']) BlockXSize,BlockYSize=rb.GetBlockSize() vrtXML = [] vrtXML.append('<VRTDataset rasterXSize="%s" rasterYSize="%s">' % (imddata['numColumns'],imddata['numRows'])) vrtXML.append('<SRS>%s</SRS>' % Projection) vrtXML.append('<GeoTransform>%s</GeoTransform>' % GeoTransform) for b, band in enumerate(imddata['bands']): b+=1 vrtXML.append(' <VRTRasterBand dataType="%s" band="%s">' % (DataType,b)) #vrtXML.append(' <ColorInterp>Gray</ColorInterp>') for tile in range(0,numTiles): tileSizeX=int(datasets['URColOffset'][tile])-int(datasets['ULColOffset'][tile])+1 tileSizeY=int(datasets['LLRowOffset'][tile])-int(datasets['ULRowOffset'][tile])+1 ULColOffset=datasets['ULColOffset'][tile] ULRowOffset=datasets['ULRowOffset'][tile] bimg,img=utilities.exists(os.path.join(curdir,datasets['filename'][tile]),True) vrtXML.append(' <SimpleSource>') vrtXML.append(' <SourceFilename relativeToVRT="0">%s</SourceFilename>' % img) vrtXML.append(' <SourceBand>%s</SourceBand>' % (b)) vrtXML.append(' <SourceProperties RasterXSize="%s" RasterYSize="%s" DataType="%s"/>'%(tileSizeX,tileSizeY,DataType))# BlockXSize="%s" BlockYSize="%s"/>'(tileSizeX,tileSizeY,DataType,BlockXSize,BlockYSize)) vrtXML.append(' <SrcRect xOff="0" yOff="0" xSize="%s" ySize="%s"/>' %(tileSizeX,tileSizeY)) vrtXML.append(' <DstRect xOff="%s" yOff="%s" xSize="%s" ySize="%s"/>' % (ULColOffset,ULRowOffset,tileSizeX,tileSizeY)) vrtXML.append(' </SimpleSource>') vrtXML.append(' </VRTRasterBand>') vrtXML.append('</VRTDataset>') vrtXML='\n'.join(vrtXML) return vrtXML def __getimddata__(self,f): #Loop thru and parse the IMD file. #would be easier to walk the nodes in the XML files, but not all of our QB imagery has this #perhaps someone deleted them...? 
lines=iter(open(f).readlines()) imddata={} bands=[] line=lines.next() while line: line=[item.strip() for item in line.replace('"','').split('=')] #line = map(string.strip, lines[i].split('=')) group=line[0] if group == 'END;':break value=line[1] if group == 'BEGIN_GROUP': group=value subdata={} if 'BAND_' in group:bands.append(group) while line: line=lines.next() line = [l.replace('"','').strip() for l in line.split('=')] subgroup=line[0] subvalue=line[1] if subgroup == 'END_GROUP':break elif line[1] == '(': while line: line=lines.next() line = line.replace('"','').strip() subvalue+=line if line[-1:]==';': subvalue=eval(subvalue.strip(';')) break else:subvalue=subvalue.strip(';') subdata[subgroup]=subvalue imddata[group]=subdata else: imddata[group]=value.strip(');') line=lines.next() imddata['bands']=bands imddata['nbands']=len(bands) return imddata def getoverview(self,outfile=None,width=800,format='JPG'): ''' Generate overviews for Digital Globe imagery @type outfile: str @param outfile: a filepath to the output overview image. If supplied, format is determined from the file extension @type width: int @param width: image width @type format: str @param format: format to generate overview image, one of ['JPG','PNG','GIF','BMP','TIF']. Not required if outfile is supplied. @rtype: str @return: filepath (if outfile is supplied)/binary image data (if outfile is not supplied) ''' from metageta import overviews #First check for a browse graphic, no point re-inventing the wheel... f=self.fileinfo['filepath'] browse=os.path.splitext(f)[0]+'-browse.jpg' if os.path.exists(browse): try:return overviews.resize(browse,outfile,width) except:return __default__.Dataset.getoverview(self,outfile,width,format) #Try it the slow way... else: return __default__.Dataset.getoverview(self,outfile,width,format)#Do it the slow way...
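# Toy, self-contained illustration of the IMD grammar that __getimddata__()
# walks above: `key = value;` pairs plus BEGIN_GROUP/END_GROUP blocks. It
# handles only one nesting level and ignores the parenthesised list values
# the full parser supports, so it sketches the file format rather than
# replacing the method.
def _parse_imd_sketch(text):
    data = {}
    group_name, group = None, None
    for raw in text.splitlines():
        line = raw.strip().rstrip(';').replace('"', '')
        if not line:
            continue
        key, _, value = [part.strip() for part in line.partition('=')]
        if key == 'END':
            break
        elif key == 'BEGIN_GROUP':
            group_name, group = value, {}
        elif key == 'END_GROUP':
            data[group_name], group = group, None
        elif group is not None:
            group[key] = value
        else:
            data[key] = value
    return data

# _parse_imd_sketch('version = "AA";\nBEGIN_GROUP = BAND_P\n\tULLon = 138.5;\nEND_GROUP = BAND_P\nEND;')
#   -->  {'version': 'AA', 'BAND_P': {'ULLon': '138.5'}}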
{ "content_hash": "0d7027fa2f81529ec96229dbbfa25aac", "timestamp": "", "source": "github", "line_count": 270, "max_line_length": 223, "avg_line_length": 49.37777777777778, "alnum_prop": 0.5597809780978098, "repo_name": "lpinner/metageta", "id": "fc1905a6dca16aeb3e744ff0e4f09951dfafa75f", "size": "14507", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "metageta/formats/digitalglobe.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "15362" }, { "name": "Python", "bytes": "497443" }, { "name": "XSLT", "bytes": "101635" } ], "symlink_target": "" }
from __future__ import unicode_literals

import string

from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.db import models
from django.db.models.signals import pre_delete, pre_save
from django.http.request import split_domain_port
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _


SITE_CACHE = {}


def _simple_domain_name_validator(value):
    """
    Validates that the given value contains no whitespaces to prevent common
    typos.
    """
    if not value:
        return
    checks = ((s in value) for s in string.whitespace)
    if any(checks):
        raise ValidationError(
            _("The domain name cannot contain any spaces or tabs."),
            code='invalid',
        )


class SiteManager(models.Manager):
    use_in_migrations = True

    def _get_site_by_id(self, site_id):
        if site_id not in SITE_CACHE:
            site = self.get(pk=site_id)
            SITE_CACHE[site_id] = site
        return SITE_CACHE[site_id]

    def _get_site_by_request(self, request):
        host = request.get_host()
        try:
            # First attempt to look up the site by host with or without port.
            if host not in SITE_CACHE:
                SITE_CACHE[host] = self.get(domain__iexact=host)
            return SITE_CACHE[host]
        except Site.DoesNotExist:
            # Fallback to looking up site after stripping port from the host.
            domain, port = split_domain_port(host)
            if not port:
                raise
            if domain not in SITE_CACHE:
                SITE_CACHE[domain] = self.get(domain__iexact=domain)
            return SITE_CACHE[domain]

    def get_current(self, request=None):
        """
        Returns the current Site based on the SITE_ID in the project's
        settings. If SITE_ID isn't defined, it returns the site with domain
        matching request.get_host(). The ``Site`` object is cached the first
        time it's retrieved from the database.
        """
        from django.conf import settings
        if getattr(settings, 'SITE_ID', ''):
            site_id = settings.SITE_ID
            return self._get_site_by_id(site_id)
        elif request:
            return self._get_site_by_request(request)

        raise ImproperlyConfigured(
            "You're using the Django \"sites framework\" without having "
            "set the SITE_ID setting. Create a site in your database and "
            "set the SITE_ID setting or pass a request to "
            "Site.objects.get_current() to fix this error."
        )

    def clear_cache(self):
        """Clears the ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}


@python_2_unicode_compatible
class Site(models.Model):

    domain = models.CharField(_('domain name'), max_length=100,
        validators=[_simple_domain_name_validator], unique=True)
    name = models.CharField(_('display name'), max_length=50)

    objects = SiteManager()

    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)

    def __str__(self):
        return self.domain


def clear_site_cache(sender, **kwargs):
    """
    Clears the cache (if primed) each time a site is saved or deleted.
    """
    instance = kwargs['instance']
    using = kwargs['using']
    try:
        del SITE_CACHE[instance.pk]
    except KeyError:
        pass
    try:
        del SITE_CACHE[Site.objects.using(using).get(pk=instance.pk).domain]
    except (KeyError, Site.DoesNotExist):
        pass


pre_save.connect(clear_site_cache, sender=Site)
pre_delete.connect(clear_site_cache, sender=Site)
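# The fallback branch in _get_site_by_request() retries the lookup after
# stripping an explicit port from the Host header. A simplified sketch of
# how the imported helper behaves (the real
# django.http.request.split_domain_port also validates the host format,
# which is omitted here):
def _split_domain_port_demo(host):
    host = host.lower()
    if host.endswith(']'):  # bare IPv6 literal such as "[::1]"
        return host, ''
    bits = host.rsplit(':', 1)
    if len(bits) == 2 and bits[1].isdigit():
        return bits[0], bits[1]
    return host, ''

# _split_domain_port_demo('example.com:8000')  -->  ('example.com', '8000')
# _split_domain_port_demo('EXAMPLE.com')       -->  ('example.com', '')
# _split_domain_port_demo('[::1]')             -->  ('[::1]', '')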
{ "content_hash": "0654d9d5757b76a7d8235f921c1aec43", "timestamp": "", "source": "github", "line_count": 115, "max_line_length": 78, "avg_line_length": 32.530434782608694, "alnum_prop": 0.6209569633787757, "repo_name": "bretlowery/snakr", "id": "0b2996094de326dd634d1dd6a3c3c6d7da73b1bc", "size": "3741", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/django/contrib/sites/models.py", "mode": "33261", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "45763" }, { "name": "HTML", "bytes": "70189" }, { "name": "JavaScript", "bytes": "121904" }, { "name": "Makefile", "bytes": "91" }, { "name": "PLpgSQL", "bytes": "20088" }, { "name": "Python", "bytes": "5592645" }, { "name": "Shell", "bytes": "759" } ], "symlink_target": "" }
import logging

import irc.client


class IRCClient(object):

    def __init__(self, network, port, nickname):
        self.log = logging.getLogger("irchooky")
        self.network = network
        self.port = int(port)
        self.nickname = nickname
        self.server = None
        self.client = None
        self.channel = ""
        self.message = ""
        self.has_quit = False
        if not self.nickname:
            self.nickname = "irchooky"

    def connect(self):  # pragma: no cover
        self.client = irc.client.Reactor()
        self.log.info("Connecting to IRC network %s" % self.network)
        try:
            self.server = self.client.server()
            self.server.connect(self.network, self.port, self.nickname)
        except irc.client.ServerConnectionError as x:
            self.log.error(x)
            raise

    def send_msg(self, msg, channel):
        if not msg:
            raise Exception("Invalid message")
        self.message = msg
        if not channel:
            raise Exception("Invalid channel name")
        self.channel = channel
        self.server.add_global_handler("join", self.irc_on_join)
        self.server.add_global_handler("welcome", self.irc_on_connect)
        self.server.add_global_handler("passwdmismatch",
                                       self.irc_on_passwdmismatch)
        self.server.add_global_handler("disconnect", self.irc_on_disconnect)
        self.server.add_global_handler("nicknameinuse",
                                       self.irc_on_invalidnickname)
        self.server.add_global_handler("nickcollision",
                                       self.irc_on_invalidnickname)
        self.server.add_global_handler("unavailresource",
                                       self.irc_on_invalidnickname)
        self.server.add_global_handler("all_events", self.irc_log_all_events,
                                       -5)
        self.main_loop()

    def main_loop(self):  # pragma: no cover
        while not self.has_quit:
            self.client.process_once(0.2)

    def irc_on_invalidnickname(self, connection, event):  # pragma: no cover
        self.log.info("Nickname %s not allowed" % self.nickname)
        self.nickname = "%s_" % self.nickname
        self.log.info("Trying nickname: %s" % self.nickname)
        connection.nick(self.nickname)

    def irc_log_all_events(self, connection, event):  # pragma: no cover
        self.log.debug("Received IRC event: %s" % event.type)

    def irc_on_join(self, connection, event):  # pragma: no cover
        self.log.info("Joined IRC channel %s" % self.channel)
        self.log.info("Sending IRC message %s" % self.message)
        connection.privmsg(self.channel, self.message)
        connection.quit()

    def irc_on_passwdmismatch(self, connection, event):  # pragma: no cover
        # Stop the main loop before raising; a statement placed after the
        # raise would be unreachable dead code.
        self.has_quit = True
        raise Exception("IRC password required")

    def irc_on_connect(self, connection, event):  # pragma: no cover
        self.log.info("Connected to IRC network.")
        connection.join(self.channel)

    def irc_on_disconnect(self, connection, event):  # pragma: no cover
        self.log.info("Disconnecting from IRC network.")
        self.has_quit = True
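# Minimal end-to-end usage sketch. It assumes a reachable IRC server that
# lets the nickname join the target channel without authentication; the
# network, channel and message below are placeholders.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    client = IRCClient("irc.example.org", 6667, "irchooky")
    client.connect()  # raises on connection failure
    # Joins the channel, posts the message, then quits and returns.
    client.send_msg("build #42 passed", "#my-project")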
{ "content_hash": "7bb98ee8c56f3abea7b0f2e05438a962", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 76, "avg_line_length": 38.86046511627907, "alnum_prop": 0.5774985038898863, "repo_name": "marvinpinto/irc-hooky", "id": "e63ad19e20761bdc7b21fcbabef5c72e4f4bcd78", "size": "3342", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "irc_hooky/irc_client.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "53" }, { "name": "Makefile", "bytes": "2090" }, { "name": "Python", "bytes": "69327" } ], "symlink_target": "" }
""" Base classes for writing management commands (named commands which can be executed through ``django-admin.py`` or ``manage.py``). """ from __future__ import unicode_literals import os import sys from optparse import make_option, OptionParser import django from django.core.exceptions import ImproperlyConfigured from django.core.management.color import color_style, no_style from django.utils.encoding import force_str from django.utils.six import StringIO class CommandError(Exception): """ Exception class indicating a problem while executing a management command. If this exception is raised during the execution of a management command, it will be caught and turned into a nicely-printed error message to the appropriate output stream (i.e., stderr); as a result, raising this exception (with a sensible description of the error) is the preferred way to indicate that something has gone wrong in the execution of a command. """ pass def handle_default_options(options): """ Include any default options that all commands should accept here so that ManagementUtility can handle them before searching for user commands. """ if options.settings: os.environ['DJANGO_SETTINGS_MODULE'] = options.settings if options.pythonpath: sys.path.insert(0, options.pythonpath) class OutputWrapper(object): """ Wrapper around stdout/stderr """ def __init__(self, out, style_func=None, ending='\n'): self._out = out self.style_func = None if hasattr(out, 'isatty') and out.isatty(): self.style_func = style_func self.ending = ending def __getattr__(self, name): return getattr(self._out, name) def write(self, msg, style_func=None, ending=None): ending = self.ending if ending is None else ending if ending and not msg.endswith(ending): msg += ending style_func = [f for f in (style_func, self.style_func, lambda x:x) if f is not None][0] self._out.write(force_str(style_func(msg))) class BaseCommand(object): """ The base class from which all management commands ultimately derive. Use this class if you want access to all of the mechanisms which parse the command-line arguments and work out what code to call in response; if you don't need to change any of that behavior, consider using one of the subclasses defined in this file. If you are interested in overriding/customizing various aspects of the command-parsing and -execution behavior, the normal flow works as follows: 1. ``django-admin.py`` or ``manage.py`` loads the command class and calls its ``run_from_argv()`` method. 2. The ``run_from_argv()`` method calls ``create_parser()`` to get an ``OptionParser`` for the arguments, parses them, performs any environment changes requested by options like ``pythonpath``, and then calls the ``execute()`` method, passing the parsed arguments. 3. The ``execute()`` method attempts to carry out the command by calling the ``handle()`` method with the parsed arguments; any output produced by ``handle()`` will be printed to standard output and, if the command is intended to produce a block of SQL statements, will be wrapped in ``BEGIN`` and ``COMMIT``. 4. If ``handle()`` or ``execute()`` raised any exception (e.g. ``CommandError``), ``run_from_argv()`` will instead print an error message to ``stderr``. Thus, the ``handle()`` method is typically the starting point for subclasses; many built-in commands and command types either place all of their logic in ``handle()``, or perform some additional parsing work in ``handle()`` and then delegate from it to more specialized methods as needed. 
    Several attributes affect behavior at various steps along the way:

    ``args``
      A string listing the arguments accepted by the command,
      suitable for use in help messages; e.g., a command which takes
      a list of application names might set this to '<appname
      appname ...>'.

    ``can_import_settings``
      A boolean indicating whether the command needs to be able to
      import Django settings; if ``True``, ``execute()`` will verify
      that this is possible before proceeding. Default value is
      ``True``.

    ``help``
      A short description of the command, which will be printed in
      help messages.

    ``option_list``
      This is the list of ``optparse`` options which will be fed
      into the command's ``OptionParser`` for parsing arguments.

    ``output_transaction``
      A boolean indicating whether the command outputs SQL
      statements; if ``True``, the output will automatically be
      wrapped with ``BEGIN;`` and ``COMMIT;``. Default value is
      ``False``.

    ``requires_model_validation``
      A boolean; if ``True``, validation of installed models will be
      performed prior to executing the command. Default value is
      ``True``. To validate an individual application's models
      rather than all applications' models, call
      ``self.validate(app)`` from ``handle()``, where ``app`` is the
      application's Python module.

    ``leave_locale_alone``
      A boolean indicating whether the locale set in settings should be
      preserved during the execution of the command instead of being
      forcibly set to 'en-us'.

      Default value is ``False``.

      Make sure you know what you are doing if you decide to change the value
      of this option in your custom command if it creates database content
      that is locale-sensitive and such content shouldn't contain any
      translations (like it happens e.g. with django.contrib.auth
      permissions) as making the locale differ from the de facto default
      'en-us' might cause unintended effects.

      This option can't be False when the can_import_settings option is set
      to False too because attempting to set the locale needs access to
      settings. This condition will generate a CommandError.
    """
    # Metadata about this command.
    option_list = (
        make_option('-v', '--verbosity', action='store', dest='verbosity', default='1',
            type='choice', choices=['0', '1', '2', '3'],
            help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'),
        make_option('--settings',
            help='The Python path to a settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.'),
        make_option('--pythonpath',
            help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".'),
        make_option('--traceback', action='store_true',
            help='Raise on exception'),
        make_option('--no-color', action='store_true', dest='no_color', default=False,
            help="Don't colorize the command output."),
    )
    help = ''
    args = ''

    # Configuration shortcuts that alter various logic.
    can_import_settings = True
    requires_model_validation = True
    output_transaction = False  # Whether to wrap the output in a "BEGIN; COMMIT;"
    leave_locale_alone = False

    def __init__(self):
        self.style = color_style()

    def get_version(self):
        """
        Return the Django version, which should be correct for all
        built-in Django commands. User-supplied commands should
        override this method.
        """
        return django.get_version()

    def usage(self, subcommand):
        """
        Return a brief description of how to use this command, by default from
        the attribute ``self.help``.
""" usage = '%%prog %s [options] %s' % (subcommand, self.args) if self.help: return '%s\n\n%s' % (usage, self.help) else: return usage def create_parser(self, prog_name, subcommand): """ Create and return the ``OptionParser`` which will be used to parse the arguments to this command. """ return OptionParser(prog=prog_name, usage=self.usage(subcommand), version=self.get_version(), option_list=self.option_list) def print_help(self, prog_name, subcommand): """ Print the help message for this command, derived from ``self.usage()``. """ parser = self.create_parser(prog_name, subcommand) parser.print_help() def run_from_argv(self, argv): """ Set up any environment changes requested (e.g., Python path and Django settings), then run this command. If the command raises a ``CommandError``, intercept it and print it sensibly to stderr. If the ``--traceback`` option is present or the raised ``Exception`` is not ``CommandError``, raise it. """ parser = self.create_parser(argv[0], argv[1]) options, args = parser.parse_args(argv[2:]) handle_default_options(options) try: self.execute(*args, **options.__dict__) except Exception as e: if options.traceback or not isinstance(e, CommandError): raise # self.stderr is not guaranteed to be set here stderr = getattr(self, 'stderr', OutputWrapper(sys.stderr, self.style.ERROR)) stderr.write('%s: %s' % (e.__class__.__name__, e)) sys.exit(1) def execute(self, *args, **options): """ Try to execute this command, performing model validation if needed (as controlled by the attribute ``self.requires_model_validation``, except if force-skipped). """ self.stdout = OutputWrapper(options.get('stdout', sys.stdout)) if options.get('no_color'): self.style = no_style() self.stderr = OutputWrapper(options.get('stderr', sys.stderr)) else: self.stderr = OutputWrapper(options.get('stderr', sys.stderr), self.style.ERROR) if self.can_import_settings: from django.conf import settings # NOQA saved_locale = None if not self.leave_locale_alone: # Only mess with locales if we can assume we have a working # settings file, because django.utils.translation requires settings # (The final saying about whether the i18n machinery is active will be # found in the value of the USE_I18N setting) if not self.can_import_settings: raise CommandError("Incompatible values of 'leave_locale_alone' " "(%s) and 'can_import_settings' (%s) command " "options." % (self.leave_locale_alone, self.can_import_settings)) # Switch to US English, because django-admin.py creates database # content like permissions, and those shouldn't contain any # translations. from django.utils import translation saved_locale = translation.get_language() translation.activate('en-us') try: if self.requires_model_validation and not options.get('skip_validation'): self.validate() output = self.handle(*args, **options) if output: if self.output_transaction: # This needs to be imported here, because it relies on # settings. from django.db import connections, DEFAULT_DB_ALIAS connection = connections[options.get('database', DEFAULT_DB_ALIAS)] if connection.ops.start_transaction_sql(): self.stdout.write(self.style.SQL_KEYWORD(connection.ops.start_transaction_sql())) self.stdout.write(output) if self.output_transaction: self.stdout.write('\n' + self.style.SQL_KEYWORD("COMMIT;")) finally: if saved_locale is not None: translation.activate(saved_locale) def validate(self, app=None, display_num_errors=False): """ Validates the given app, raising CommandError for any errors. If app is None, then this will validate all installed apps. 
""" from django.core.management.validation import get_validation_errors s = StringIO() num_errors = get_validation_errors(s, app) if num_errors: s.seek(0) error_text = s.read() raise CommandError("One or more models did not validate:\n%s" % error_text) if display_num_errors: self.stdout.write("%s error%s found" % (num_errors, '' if num_errors == 1 else 's')) def handle(self, *args, **options): """ The actual logic of the command. Subclasses must implement this method. """ raise NotImplementedError('subclasses of BaseCommand must provide a handle() method') class AppCommand(BaseCommand): """ A management command which takes one or more installed application names as arguments, and does something with each of them. Rather than implementing ``handle()``, subclasses must implement ``handle_app()``, which will be called once for each application. """ args = '<appname appname ...>' def handle(self, *app_labels, **options): from django.db import models if not app_labels: raise CommandError('Enter at least one appname.') try: app_list = [models.get_app(app_label) for app_label in app_labels] except (ImproperlyConfigured, ImportError) as e: raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e) output = [] for app in app_list: app_output = self.handle_app(app, **options) if app_output: output.append(app_output) return '\n'.join(output) def handle_app(self, app, **options): """ Perform the command's actions for ``app``, which will be the Python module corresponding to an application name given on the command line. """ raise NotImplementedError('subclasses of AppCommand must provide a handle_app() method') class LabelCommand(BaseCommand): """ A management command which takes one or more arbitrary arguments (labels) on the command line, and does something with each of them. Rather than implementing ``handle()``, subclasses must implement ``handle_label()``, which will be called once for each label. If the arguments should be names of installed applications, use ``AppCommand`` instead. """ args = '<label label ...>' label = 'label' def handle(self, *labels, **options): if not labels: raise CommandError('Enter at least one %s.' % self.label) output = [] for label in labels: label_output = self.handle_label(label, **options) if label_output: output.append(label_output) return '\n'.join(output) def handle_label(self, label, **options): """ Perform the command's actions for ``label``, which will be the string as given on the command line. """ raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method') class NoArgsCommand(BaseCommand): """ A command which takes no arguments on the command line. Rather than implementing ``handle()``, subclasses must implement ``handle_noargs()``; ``handle()`` itself is overridden to ensure no arguments are passed to the command. Attempting to pass arguments will raise ``CommandError``. """ args = '' def handle(self, *args, **options): if args: raise CommandError("Command doesn't accept any arguments") return self.handle_noargs(**options) def handle_noargs(self, **options): """ Perform this command's actions. """ raise NotImplementedError('subclasses of NoArgsCommand must provide a handle_noargs() method')
{ "content_hash": "b4eaa54981be9fc720c6f57bbcc7adb9", "timestamp": "", "source": "github", "line_count": 428, "max_line_length": 177, "avg_line_length": 38.52570093457944, "alnum_prop": 0.625265328400752, "repo_name": "rogerhu/django", "id": "ef967d021fd153daa829f08b0ecb716170ff08ca", "size": "16489", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "django/core/management/base.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "51177" }, { "name": "JavaScript", "bytes": "102290" }, { "name": "Python", "bytes": "9186415" }, { "name": "Shell", "bytes": "12137" } ], "symlink_target": "" }
from pony.orm import * db = Database() class Profile(db.Entity): username = PrimaryKey(str, 16) status = Required(str, 10) firstName = Optional(unicode, 160) lastName = Optional(unicode, 160) mean = Optional(float) requirements = Optional(Json) classrooms = Set('Classroom') topicmeans = Set('TopicMean') nodemeans = Set('NodeMean') sentEnvelopes = Set('Envelope', reverse='sender') receivedEnvelopes = Set('Envelope', reverse='receiver') class Classroom(db.Entity): ref = PrimaryKey(unicode, 40) profiles = Set(Profile) topics = Set('Topic') class Content(db.Entity): serial = PrimaryKey(str, 8) name = Optional(unicode, 160) notes = Optional(LongUnicode) text = Optional(LongUnicode) class Topic(Content): nodes = Set('Node') classrooms = Set(Classroom) topicmeans = Set('TopicMean') class Node(Content): bloom = Optional(str, 10) mobile = Optional(bool) questions = Set('Question') topics = Set(Topic) antes = Set('Node', reverse='posts') posts = Set('Node', reverse='antes') nodemeans = Set('NodeMean') class Question(Content): kind = Optional(str, 10) options = Optional(Json) node = Optional(Node) class TopicMean(db.Entity): profile = Required(Profile) topic = Required(Topic) value = Required(float) PrimaryKey(profile, topic) class NodeMean(db.Entity): profile = Required(Profile) node = Required(Node) history = Required(str, 4) value = Required(float) PrimaryKey(profile, node) class Envelope(Content): sender = Required(Profile, reverse='sentEnvelopes') receiver = Required(Profile, reverse='receivedEnvelopes') db.bind("sqlite", "olddb.sqlite", create_db=True) db.generate_mapping(create_tables=True)
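# A minimal round-trip against the schema above, runnable once the
# bind()/generate_mapping() calls have executed. The usernames and values
# are made up for illustration; db_session commits on exit, so re-running
# this block against the same olddb.sqlite would violate the primary key.
if __name__ == "__main__":
    with db_session:
        alice = Profile(username="alice", status="active", firstName=u"Alice")
        Classroom(ref="room-2024", profiles=[alice])
    with db_session:
        active = select(p for p in Profile if p.status == "active")[:]
        print([p.username for p in active])  # ['alice']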
{ "content_hash": "4476a1c26ad2033eadcc619da95b264b", "timestamp": "", "source": "github", "line_count": 77, "max_line_length": 61, "avg_line_length": 23.415584415584416, "alnum_prop": 0.6688851913477537, "repo_name": "yamath/xprym", "id": "f00e3bc1a4b26bdcfb1899dfab71d486b9de6947", "size": "1803", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "back/oldmodels.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "86" }, { "name": "HTML", "bytes": "19708" }, { "name": "JavaScript", "bytes": "218027" }, { "name": "Python", "bytes": "45263" }, { "name": "Shell", "bytes": "95" } ], "symlink_target": "" }
import gtk import gio import odml import odml.terminology as terminology import odml.format as format import commands from TreeView import TerminologyPopupTreeView from treemodel import PropertyModel from DragProvider import DragProvider from ChooserDialog import ChooserDialog import TextEditor COL_KEY = 0 COL_VALUE = 1 from dnd.targets import ValueDrop, PropertyDrop, SectionDrop from dnd.odmldrop import OdmlDrag, OdmlDrop from dnd.text import TextDrag, TextDrop, TextGenericDropForPropertyTV class PropertyView(TerminologyPopupTreeView): """ The Main treeview for editing properties and their value-attributes """ _section = None def __init__(self, registry): super(PropertyView, self).__init__() tv = self._treeview for name, (id, propname) in PropertyModel.ColMapper.sort_iteritems(): column = self.add_column( name=name, edit_func=self.on_edited, id=id, data=propname) if name == "Value": tv.set_expander_column(column) tv.set_headers_visible(True) tv.set_rules_hint(True) tv.show() # set up our drag provider dp = DragProvider(self._treeview) _exec = lambda cmd: self.execute(cmd) vd = ValueDrop(exec_func=_exec) pd = PropertyDrop(exec_func=_exec) sd = SectionDrop(exec_func=_exec) for target in [ OdmlDrag(mime="odml/property-ref", inst=odml.property.Property), TextDrag(mime="odml/property", inst=odml.property.Property), OdmlDrag(mime="odml/value-ref", inst=odml.value.Value), TextDrag(mime="odml/value", inst=odml.value.Value), TextDrag(mime="TEXT"), OdmlDrop(mime="odml/value-ref", target=vd, registry=registry, exec_func=_exec), OdmlDrop(mime="odml/property-ref", target=pd, registry=registry, exec_func=_exec), OdmlDrop(mime="odml/section-ref", target=sd, registry=registry, exec_func=_exec), TextDrop(mime="odml/value", target=vd), TextDrop(mime="odml/property", target=pd), TextDrop(mime="odml/section", target=sd), TextGenericDropForPropertyTV(exec_func=_exec), ]: dp.append(target) dp.execute = _exec @property def section(self): return self._section @section.setter def section(self, section): if self._section is section: return self._section = section if self.model: self.model.destroy() self.model = PropertyModel.PropertyModel(section) @property def model(self): return self._treeview.get_model() @model.setter def model(self, new_value): self._treeview.set_model(new_value) def on_selection_change(self, tree_selection): (model, tree_iter) = tree_selection.get_selected() if not tree_iter: return obj = model.get_object(tree_iter) self.on_property_select(obj) def on_property_select(self, prop): """called when a different property is selected""" pass def on_get_tooltip(self, model, path, iter, tooltip): """ set the tooltip text, if the gui queries for it """ obj = model.get_object(iter) doc = obj.document if doc and hasattr(doc, "validation_result"): errors = doc.validation_result[obj] if len(errors) > 0: tooltip.set_text("\n".join([e.msg for e in errors])) return True def on_object_edit(self, tree_iter, column_name, new_text): """ called upon an edit event of the list view updates the underlying model property that corresponds to the edited cell """ section = self.section prop = tree_iter._obj # are we editing the first_row of a <multi> value? first_row = not tree_iter.parent first_row_of_multi = first_row and tree_iter.has_child # can only edit the subvalues, but not <multi> itself if first_row_of_multi and column_name == "value": return if not first_row and column_name == "name": return cmd = None # if we edit another attribute (e.g. 
unit), set this for all values of this property if first_row_of_multi and column_name != "name": # editing multiple values of a property at once cmds = [] for value in prop.values: cmds.append(commands.ChangeValue( object = value, attr = [column_name, "value"], new_value = new_text)) cmd = commands.Multiple(cmds=cmds) else: # first row edit event for the value, so switch the object if column_name != "name" and first_row: prop = prop.values[0] if not (column_name == "name" and first_row): column_name = [column_name, "value"] # backup the value attribute too cmd = commands.ChangeValue( object = prop, attr = column_name, new_value = new_text) if cmd: self.execute(cmd) def on_set_mapping(self, menu, (prop, mapping_obj)): """ popup menu action: set mapping for a property """ mapstr = "%s#%s:%s" % (prop.parent.get_repository(), mapping_obj.parent.type, mapping_obj.name) cmd = commands.ChangeValue( object = prop, attr = "mapping", new_value = mapstr) self.execute(cmd) def get_popup_mapping_section(self, sec, obj): """generate the popup menu items for a certain section in the mapping-popup-menu""" for sec in sec.sections: item = self.create_menu_item(sec.name) if len(sec) > 0: item.set_submenu(self.get_popup_menu(lambda: self.get_popup_mapping_section(sec, obj))) yield item if isinstance(sec, odml.doc.Document): return yield self.create_menu_item(None) #separator for prop in sec.properties: item = self.create_menu_item(prop.name) item.connect('activate', self.on_set_mapping, (obj, prop)) yield item def get_popup_mapping_suggestions(self, prop): """ build a submenu with mapping suggestions """ repo = prop.parent.get_repository() if not repo: return None term = terminology.load(repo) menu = self.create_menu_item("Map", stock="odml-set-Mapping") submenu = self.get_popup_menu(lambda: self.get_popup_mapping_section(term, prop)) menu.set_submenu(submenu) return menu def get_popup_menu_items(self): model, path, obj = self.popup_data menu_items = self.create_popup_menu_items("odml-add-Property", "Empty Property", model.section, self.add_property, lambda sec: sec.properties, lambda prop: prop.name, stock=True) if obj is not None: # can also add value prop = obj if hasattr(obj, "_property"): # we care about the properties only prop = obj._property value_filter = lambda prop: [val for val in prop.values if val.value is not None and val.value != ""] for item in self.create_popup_menu_items("odml-add-Value", "Empty Value", prop, self.add_value, value_filter, lambda val: val.value, stock=True): menu_items.append(item) for item in self.create_popup_menu_items("Set Value", "Empty Value", prop, self.set_value, value_filter, lambda val: val.value): if item.get_submenu() is None: continue # don't want a sole Set Value item menu_items.append(item) # conditionally allow to store / load binary content val = obj if prop is obj: val = obj.value if len(obj) == 1 else None if val is not None and val.dtype == "binary": menu_items.append(self.create_menu_item("Load binary content", self.binary_load, val)) if val.data is not None: menu_items.append(self.create_menu_item("Save binary content", self.binary_save, val)) if val is not None and val.dtype == "text": menu_items.append(self.create_menu_item("Edit text in larger window", self.edit_text, val)) # if repository is set, show a menu to set mappings mapping_menu = self.get_popup_mapping_suggestions(prop) if mapping_menu: menu_items.append(mapping_menu) # cannot delete properties that are linked (they'd be override on next load), instead allow to reset them merged = 
prop.get_merged_equivalent()
        if prop is obj and merged is not None:
            if merged != obj:
                menu_items.append(self.create_menu_item("Reset to merged default", self.reset_property, obj))
        else:
            menu_items.append(self.create_popup_menu_del_item(obj))

        return menu_items

    def binary_load(self, widget, val):
        """
        popup menu action: load binary content
        """
        chooser = ChooserDialog(title="Open binary file", save=False)
        if val.filename is not None: # try to set the filename (if it exists)
            chooser.set_file(gio.File(val.filename))
        chooser.show()

        def binary_load_file(uri):
            if val._encoder is None:
                val.encoder = "base64"
            val.data = gio.File(uri).read().read()
        chooser.on_accept = binary_load_file

    def binary_save(self, widget, val):
        """
        popup menu action: save binary content
        """
        chooser = ChooserDialog(title="Save binary file", save=True)
        if val.filename is not None: # suggest a filename
            chooser.set_current_name(val.filename)
        chooser.show()

        def binary_save_file(uri):
            fp = gio.File(uri).replace(etag='', make_backup=False)
            fp.write(val.data)
            fp.close()
        chooser.on_accept = binary_save_file

    def edit_text(self, widget, val):
        """
        popup menu action: edit text in larger window
        """
        t = TextEditor.TextEditor(val, "value")
        t.execute = self.execute

    def reset_property(self, widget, prop):
        """
        popup menu action: reset property
        """
        dst = prop.get_merged_equivalent().clone()
        cmd = commands.ReplaceObject(obj=prop, repl=dst)
        self.execute(cmd)

    def set_value(self, widget, (prop, val)):
        """
        popup menu action: set value

        replace the currently selected value with the chosen one
        """
        model, path, obj = self.popup_data
        if val is None:
            val = odml.Value("")
        else:
            val = val.clone()

        if obj is prop:
            obj = prop.values[0]
        prop = obj._property

        # first append, then remove to keep the constraint that a property
        # will always hold at least one value
        cmd = commands.Multiple(cmds=[
            commands.AppendValue(obj=prop, val=val),
            commands.DeleteObject(obj=obj)
            ])
        self.execute(cmd)

    def add_value(self, widget, (obj, val)):
        """
        popup menu action: add value

        add a value to the selected property
        """
        if val is None:
            val = odml.Value("")
        else:
            val = val.clone()

        cmd = commands.AppendValue(obj=obj, val=val)
        self.execute(cmd)

    def add_property(self, widget, (obj, val)):
        """
        popup menu action: add property

        add a property to the active section
        """
        if val is None:
            val = odml.Property(name="unnamed property", value="")
        else:
            val = val.clone()

        cmd = commands.AppendValue(obj=obj, val=val)
        self.execute(cmd)
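# Every handler above funnels mutations through self.execute with a command
# object, which is what makes the edits undoable. The real classes live in
# the commands module; the stripped-down sketch below only illustrates the
# pattern (change one attribute, remember enough state to undo it) and is
# not the actual implementation.
class _ChangeValueSketch(object):
    def __init__(self, object, attr, new_value):
        self.obj = object
        # attr may be a list; the first entry is changed, the rest are
        # backed up alongside it so undo() can restore them too
        self.attrs = attr if isinstance(attr, list) else [attr]
        self.new_value = new_value
        self.old = {}

    def __call__(self):
        self.old = dict((a, getattr(self.obj, a)) for a in self.attrs)
        setattr(self.obj, self.attrs[0], self.new_value)

    def undo(self):
        for a, v in self.old.items():
            setattr(self.obj, a, v)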
{ "content_hash": "463d4ab68054dc282868768eea9f8dca", "timestamp": "", "source": "github", "line_count": 344, "max_line_length": 186, "avg_line_length": 35.31395348837209, "alnum_prop": 0.5784491274283833, "repo_name": "stoewer/python-odml", "id": "7668e757f9283d3abc959f2bda8ed720860e9043", "size": "12148", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "odml/gui/PropertyView.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "334930" } ], "symlink_target": "" }
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "ConstantTrend", cycle_length = 0, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 0);
{ "content_hash": "d36ebbb18cd7c00e0803633f130ece67", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 176, "avg_line_length": 39.42857142857143, "alnum_prop": 0.717391304347826, "repo_name": "antoinecarme/pyaf", "id": "b45fa366221438da15124ce1d5da6cb0f5c47949", "size": "276", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/artificial/transf_RelativeDifference/trend_ConstantTrend/cycle_0/ar_/test_artificial_32_RelativeDifference_ConstantTrend_0__20.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "6773299" }, { "name": "Procfile", "bytes": "24" }, { "name": "Python", "bytes": "54209093" }, { "name": "R", "bytes": "807" }, { "name": "Shell", "bytes": "3619" } ], "symlink_target": "" }
"""Module implementing the job queue handling. Locking: there's a single, large lock in the L{JobQueue} class. It's used by all other classes in this module. @var JOBQUEUE_THREADS: the number of worker threads we start for processing jobs """ import logging import errno import time import weakref import threading import itertools import operator import os try: # pylint: disable=E0611 from pyinotify import pyinotify except ImportError: import pyinotify from ganeti import asyncnotifier from ganeti import constants from ganeti import serializer from ganeti import workerpool from ganeti import locking from ganeti import luxi from ganeti import opcodes from ganeti import opcodes_base from ganeti import errors from ganeti import mcpu from ganeti import utils from ganeti import jstore import ganeti.rpc.node as rpc from ganeti import runtime from ganeti import netutils from ganeti import compat from ganeti import ht from ganeti import query from ganeti import qlang from ganeti import pathutils from ganeti import vcluster from ganeti.cmdlib import cluster JOBQUEUE_THREADS = 1 # member lock names to be passed to @ssynchronized decorator _LOCK = "_lock" _QUEUE = "_queue" #: Retrieves "id" attribute _GetIdAttr = operator.attrgetter("id") class CancelJob(Exception): """Special exception to cancel a job. """ def TimeStampNow(): """Returns the current timestamp. @rtype: tuple @return: the current time in the (seconds, microseconds) format """ return utils.SplitTime(time.time()) def _CallJqUpdate(runner, names, file_name, content): """Updates job queue file after virtualizing filename. """ virt_file_name = vcluster.MakeVirtualPath(file_name) return runner.call_jobqueue_update(names, virt_file_name, content) class _QueuedOpCode(object): """Encapsulates an opcode object. @ivar log: holds the execution log and consists of tuples of the form C{(log_serial, timestamp, level, message)} @ivar input: the OpCode we encapsulate @ivar status: the current status @ivar result: the result of the LU execution @ivar start_timestamp: timestamp for the start of the execution @ivar exec_timestamp: timestamp for the actual LU Exec() function invocation @ivar stop_timestamp: timestamp for the end of the execution """ __slots__ = ["input", "status", "result", "log", "priority", "start_timestamp", "exec_timestamp", "end_timestamp", "__weakref__"] def __init__(self, op): """Initializes instances of this class. @type op: L{opcodes.OpCode} @param op: the opcode we encapsulate """ self.input = op self.status = constants.OP_STATUS_QUEUED self.result = None self.log = [] self.start_timestamp = None self.exec_timestamp = None self.end_timestamp = None # Get initial priority (it might change during the lifetime of this opcode) self.priority = getattr(op, "priority", constants.OP_PRIO_DEFAULT) @classmethod def Restore(cls, state): """Restore the _QueuedOpCode from the serialized form. @type state: dict @param state: the serialized state @rtype: _QueuedOpCode @return: a new _QueuedOpCode instance """ obj = _QueuedOpCode.__new__(cls) obj.input = opcodes.OpCode.LoadOpCode(state["input"]) obj.status = state["status"] obj.result = state["result"] obj.log = state["log"] obj.start_timestamp = state.get("start_timestamp", None) obj.exec_timestamp = state.get("exec_timestamp", None) obj.end_timestamp = state.get("end_timestamp", None) obj.priority = state.get("priority", constants.OP_PRIO_DEFAULT) return obj def Serialize(self): """Serializes this _QueuedOpCode. 
@rtype: dict @return: the dictionary holding the serialized state """ return { "input": self.input.__getstate__(), "status": self.status, "result": self.result, "log": self.log, "start_timestamp": self.start_timestamp, "exec_timestamp": self.exec_timestamp, "end_timestamp": self.end_timestamp, "priority": self.priority, } class _QueuedJob(object): """In-memory job representation. This is what we use to track the user-submitted jobs. Locking must be taken care of by users of this class. @type queue: L{JobQueue} @ivar queue: the parent queue @ivar id: the job ID @type ops: list @ivar ops: the list of _QueuedOpCode that constitute the job @type log_serial: int @ivar log_serial: holds the index for the next log entry @ivar received_timestamp: the timestamp for when the job was received @ivar start_timestmap: the timestamp for start of execution @ivar end_timestamp: the timestamp for end of execution @ivar writable: Whether the job is allowed to be modified """ # pylint: disable=W0212 __slots__ = ["queue", "id", "ops", "log_serial", "ops_iter", "cur_opctx", "received_timestamp", "start_timestamp", "end_timestamp", "processor_lock", "writable", "archived", "livelock", "process_id", "__weakref__"] def AddReasons(self, pickup=False): """Extend the reason trail Add the reason for all the opcodes of this job to be executed. """ count = 0 for queued_op in self.ops: op = queued_op.input if pickup: reason_src_prefix = constants.OPCODE_REASON_SRC_PICKUP else: reason_src_prefix = constants.OPCODE_REASON_SRC_OPCODE reason_src = opcodes_base.NameToReasonSrc(op.__class__.__name__, reason_src_prefix) reason_text = "job=%d;index=%d" % (self.id, count) reason = getattr(op, "reason", []) reason.append((reason_src, reason_text, utils.EpochNano())) op.reason = reason count = count + 1 def __init__(self, queue, job_id, ops, writable): """Constructor for the _QueuedJob. @type queue: L{JobQueue} @param queue: our parent queue @type job_id: job_id @param job_id: our job id @type ops: list @param ops: the list of opcodes we hold, which will be encapsulated in _QueuedOpCodes @type writable: bool @param writable: Whether job can be modified """ if not ops: raise errors.GenericError("A job needs at least one opcode") self.queue = queue self.id = int(job_id) self.ops = [_QueuedOpCode(op) for op in ops] self.AddReasons() self.log_serial = 0 self.received_timestamp = TimeStampNow() self.start_timestamp = None self.end_timestamp = None self.archived = False self.livelock = None self.process_id = None self._InitInMemory(self, writable) assert not self.archived, "New jobs can not be marked as archived" @staticmethod def _InitInMemory(obj, writable): """Initializes in-memory variables. 
""" obj.writable = writable obj.ops_iter = None obj.cur_opctx = None # Read-only jobs are not processed and therefore don't need a lock if writable: obj.processor_lock = threading.Lock() else: obj.processor_lock = None def __repr__(self): status = ["%s.%s" % (self.__class__.__module__, self.__class__.__name__), "id=%s" % self.id, "ops=%s" % ",".join([op.input.Summary() for op in self.ops])] return "<%s at %#x>" % (" ".join(status), id(self)) @classmethod def Restore(cls, queue, state, writable, archived): """Restore a _QueuedJob from serialized state: @type queue: L{JobQueue} @param queue: to which queue the restored job belongs @type state: dict @param state: the serialized state @type writable: bool @param writable: Whether job can be modified @type archived: bool @param archived: Whether job was already archived @rtype: _JobQueue @return: the restored _JobQueue instance """ obj = _QueuedJob.__new__(cls) obj.queue = queue obj.id = int(state["id"]) obj.received_timestamp = state.get("received_timestamp", None) obj.start_timestamp = state.get("start_timestamp", None) obj.end_timestamp = state.get("end_timestamp", None) obj.archived = archived obj.livelock = state.get("livelock", None) obj.process_id = state.get("process_id", None) if obj.process_id is not None: obj.process_id = int(obj.process_id) obj.ops = [] obj.log_serial = 0 for op_state in state["ops"]: op = _QueuedOpCode.Restore(op_state) for log_entry in op.log: obj.log_serial = max(obj.log_serial, log_entry[0]) obj.ops.append(op) cls._InitInMemory(obj, writable) return obj def Serialize(self): """Serialize the _JobQueue instance. @rtype: dict @return: the serialized state """ return { "id": self.id, "ops": [op.Serialize() for op in self.ops], "start_timestamp": self.start_timestamp, "end_timestamp": self.end_timestamp, "received_timestamp": self.received_timestamp, "livelock": self.livelock, "process_id": self.process_id, } def CalcStatus(self): """Compute the status of this job. This function iterates over all the _QueuedOpCodes in the job and based on their status, computes the job status. The algorithm is: - if we find a cancelled, or finished with error, the job status will be the same - otherwise, the last opcode with the status one of: - waitlock - canceling - running will determine the job status - otherwise, it means either all opcodes are queued, or success, and the job status will be the same @return: the job status """ status = constants.JOB_STATUS_QUEUED all_success = True for op in self.ops: if op.status == constants.OP_STATUS_SUCCESS: continue all_success = False if op.status == constants.OP_STATUS_QUEUED: pass elif op.status == constants.OP_STATUS_WAITING: status = constants.JOB_STATUS_WAITING elif op.status == constants.OP_STATUS_RUNNING: status = constants.JOB_STATUS_RUNNING elif op.status == constants.OP_STATUS_CANCELING: status = constants.JOB_STATUS_CANCELING break elif op.status == constants.OP_STATUS_ERROR: status = constants.JOB_STATUS_ERROR # The whole job fails if one opcode failed break elif op.status == constants.OP_STATUS_CANCELED: status = constants.OP_STATUS_CANCELED break if all_success: status = constants.JOB_STATUS_SUCCESS return status def CalcPriority(self): """Gets the current priority for this job. Only unfinished opcodes are considered. When all are done, the default priority is used. 
    @rtype: int

    """
    priorities = [op.priority for op in self.ops
                  if op.status not in constants.OPS_FINALIZED]

    if not priorities:
      # All opcodes are done, assume default priority
      return constants.OP_PRIO_DEFAULT

    return min(priorities)

  def GetLogEntries(self, newer_than):
    """Selectively returns the log entries.

    @type newer_than: None or int
    @param newer_than: if this is None, return all log entries,
        otherwise return only the log entries with serial higher
        than this value
    @rtype: list
    @return: the list of the log entries selected

    """
    if newer_than is None:
      serial = -1
    else:
      serial = newer_than

    entries = []
    for op in self.ops:
      entries.extend(filter(lambda entry: entry[0] > serial, op.log))

    return entries

  def MarkUnfinishedOps(self, status, result):
    """Mark unfinished opcodes with a given status and result.

    This is a utility function for marking all running or waiting to
    be run opcodes with a given status. Opcodes which are already
    finalised are not changed.

    @param status: a given opcode status
    @param result: the opcode result

    """
    not_marked = True
    for op in self.ops:
      if op.status in constants.OPS_FINALIZED:
        assert not_marked, "Finalized opcodes found after non-finalized ones"
        continue
      op.status = status
      op.result = result
      not_marked = False

  def Finalize(self):
    """Marks the job as finalized.

    """
    self.end_timestamp = TimeStampNow()

  def Cancel(self):
    """Marks job as canceled/-ing if possible.

    @rtype: tuple; (bool, string)
    @return: Boolean describing whether job was successfully canceled or marked
      as canceling and a text message

    """
    status = self.CalcStatus()

    if status == constants.JOB_STATUS_QUEUED:
      self.MarkUnfinishedOps(constants.OP_STATUS_CANCELED,
                             "Job canceled by request")
      self.Finalize()
      return (True, "Job %s canceled" % self.id)

    elif status == constants.JOB_STATUS_WAITING:
      # The worker will notice the new status and cancel the job
      self.MarkUnfinishedOps(constants.OP_STATUS_CANCELING, None)
      return (True, "Job %s will be canceled" % self.id)

    else:
      logging.debug("Job %s is no longer waiting in the queue", self.id)
      return (False, "Job %s is no longer waiting in the queue" % self.id)

  def ChangePriority(self, priority):
    """Changes the job priority.

@type priority: int @param priority: New priority @rtype: tuple; (bool, string) @return: Boolean describing whether job's priority was successfully changed and a text message """ status = self.CalcStatus() if status in constants.JOBS_FINALIZED: return (False, "Job %s is finished" % self.id) elif status == constants.JOB_STATUS_CANCELING: return (False, "Job %s is cancelling" % self.id) else: assert status in (constants.JOB_STATUS_QUEUED, constants.JOB_STATUS_WAITING, constants.JOB_STATUS_RUNNING) changed = False for op in self.ops: if (op.status == constants.OP_STATUS_RUNNING or op.status in constants.OPS_FINALIZED): assert not changed, \ ("Found opcode for which priority should not be changed after" " priority has been changed for previous opcodes") continue assert op.status in (constants.OP_STATUS_QUEUED, constants.OP_STATUS_WAITING) changed = True # Set new priority (doesn't modify opcode input) op.priority = priority if changed: return (True, ("Priorities of pending opcodes for job %s have been" " changed to %s" % (self.id, priority))) else: return (False, "Job %s had no pending opcodes" % self.id) def SetPid(self, pid): """Sets the job's process ID @type pid: int @param pid: the process ID """ status = self.CalcStatus() if status in (constants.JOB_STATUS_QUEUED, constants.JOB_STATUS_WAITING): if self.process_id is not None: logging.warning("Replacing the process id %s of job %s with %s", self.process_id, self.id, pid) self.process_id = pid else: logging.warning("Can set pid only for queued/waiting jobs") class _OpExecCallbacks(mcpu.OpExecCbBase): def __init__(self, queue, job, op): """Initializes this class. @type queue: L{JobQueue} @param queue: Job queue @type job: L{_QueuedJob} @param job: Job object @type op: L{_QueuedOpCode} @param op: OpCode """ super(_OpExecCallbacks, self).__init__() assert queue, "Queue is missing" assert job, "Job is missing" assert op, "Opcode is missing" self._queue = queue self._job = job self._op = op def _CheckCancel(self): """Raises an exception to cancel the job if asked to. """ # Cancel here if we were asked to if self._op.status == constants.OP_STATUS_CANCELING: logging.debug("Canceling opcode") raise CancelJob() @locking.ssynchronized(_QUEUE, shared=1) def NotifyStart(self): """Mark the opcode as running, not lock-waiting. This is called from the mcpu code as a notifier function, when the LU is finally about to start the Exec() method. Of course, to have end-user visible results, the opcode must be initially (before calling into Processor.ExecOpCode) set to OP_STATUS_WAITING. """ assert self._op in self._job.ops assert self._op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING) # Cancel here if we were asked to self._CheckCancel() logging.debug("Opcode is now running") self._op.status = constants.OP_STATUS_RUNNING self._op.exec_timestamp = TimeStampNow() # And finally replicate the job status self._queue.UpdateJobUnlocked(self._job) @locking.ssynchronized(_QUEUE, shared=1) def NotifyRetry(self): """Mark opcode again as lock-waiting. This is called from the mcpu code just after calling PrepareRetry. The opcode will now again acquire locks (more, hopefully). """ self._op.status = constants.OP_STATUS_WAITING logging.debug("Opcode will be retried. 
Back to waiting.") @locking.ssynchronized(_QUEUE, shared=1) def _AppendFeedback(self, timestamp, log_type, log_msg): """Internal feedback append function, with locks """ self._job.log_serial += 1 self._op.log.append((self._job.log_serial, timestamp, log_type, log_msg)) self._queue.UpdateJobUnlocked(self._job, replicate=False) def Feedback(self, *args): """Append a log entry. """ assert len(args) < 3 if len(args) == 1: log_type = constants.ELOG_MESSAGE log_msg = args[0] else: (log_type, log_msg) = args # The time is split to make serialization easier and not lose # precision. timestamp = utils.SplitTime(time.time()) self._AppendFeedback(timestamp, log_type, log_msg) def CurrentPriority(self): """Returns current priority for opcode. """ assert self._op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING) # Cancel here if we were asked to self._CheckCancel() return self._op.priority def SubmitManyJobs(self, jobs): """Submits jobs for processing. See L{JobQueue.SubmitManyJobs}. """ # Locking is done in job queue return self._queue.SubmitManyJobs(jobs) def _EncodeOpError(err): """Encodes an error which occurred while processing an opcode. """ if isinstance(err, errors.GenericError): to_encode = err else: to_encode = errors.OpExecError(str(err)) return errors.EncodeException(to_encode) class _TimeoutStrategyWrapper: def __init__(self, fn): """Initializes this class. """ self._fn = fn self._next = None def _Advance(self): """Gets the next timeout if necessary. """ if self._next is None: self._next = self._fn() def Peek(self): """Returns the next timeout. """ self._Advance() return self._next def Next(self): """Returns the current timeout and advances the internal state. """ self._Advance() result = self._next self._next = None return result class _OpExecContext: def __init__(self, op, index, log_prefix, timeout_strategy_factory): """Initializes this class. """ self.op = op self.index = index self.log_prefix = log_prefix self.summary = op.input.Summary() # Create local copy to modify if getattr(op.input, opcodes_base.DEPEND_ATTR, None): self.jobdeps = op.input.depends[:] else: self.jobdeps = None self._timeout_strategy_factory = timeout_strategy_factory self._ResetTimeoutStrategy() def _ResetTimeoutStrategy(self): """Creates a new timeout strategy. """ self._timeout_strategy = \ _TimeoutStrategyWrapper(self._timeout_strategy_factory().NextAttempt) def CheckPriorityIncrease(self): """Checks whether priority can and should be increased. Called when locks couldn't be acquired. """ op = self.op # Exhausted all retries and next round should not use blocking acquire # for locks? if (self._timeout_strategy.Peek() is None and op.priority > constants.OP_PRIO_HIGHEST): logging.debug("Increasing priority") op.priority -= 1 self._ResetTimeoutStrategy() return True return False def GetNextLockTimeout(self): """Returns the next lock acquire timeout. """ return self._timeout_strategy.Next() class _JobProcessor(object): (DEFER, WAITDEP, FINISHED) = range(1, 4) def __init__(self, queue, opexec_fn, job, _timeout_strategy_factory=mcpu.LockAttemptTimeoutStrategy): """Initializes this class. """ self.queue = queue self.opexec_fn = opexec_fn self.job = job self._timeout_strategy_factory = _timeout_strategy_factory @staticmethod def _FindNextOpcode(job, timeout_strategy_factory): """Locates the next opcode to run. 
@type job: L{_QueuedJob} @param job: Job object @param timeout_strategy_factory: Callable to create new timeout strategy """ # Create some sort of a cache to speed up locating next opcode for future # lookups # TODO: Consider splitting _QueuedJob.ops into two separate lists, one for # pending and one for processed ops. if job.ops_iter is None: job.ops_iter = enumerate(job.ops) # Find next opcode to run while True: try: (idx, op) = job.ops_iter.next() except StopIteration: raise errors.ProgrammerError("Called for a finished job") if op.status == constants.OP_STATUS_RUNNING: # Found an opcode already marked as running raise errors.ProgrammerError("Called for job marked as running") opctx = _OpExecContext(op, idx, "Op %s/%s" % (idx + 1, len(job.ops)), timeout_strategy_factory) if op.status not in constants.OPS_FINALIZED: return opctx # This is a job that was partially completed before master daemon # shutdown, so it can be expected that some opcodes are already # completed successfully (if any did error out, then the whole job # should have been aborted and not resubmitted for processing). logging.info("%s: opcode %s already processed, skipping", opctx.log_prefix, opctx.summary) @staticmethod def _MarkWaitlock(job, op): """Marks an opcode as waiting for locks. The job's start timestamp is also set if necessary. @type job: L{_QueuedJob} @param job: Job object @type op: L{_QueuedOpCode} @param op: Opcode object """ assert op in job.ops assert op.status in (constants.OP_STATUS_QUEUED, constants.OP_STATUS_WAITING) update = False op.result = None if op.status == constants.OP_STATUS_QUEUED: op.status = constants.OP_STATUS_WAITING update = True if op.start_timestamp is None: op.start_timestamp = TimeStampNow() update = True if job.start_timestamp is None: job.start_timestamp = op.start_timestamp update = True assert op.status == constants.OP_STATUS_WAITING return update @staticmethod def _CheckDependencies(queue, job, opctx): """Checks if an opcode has dependencies and if so, processes them. @type queue: L{JobQueue} @param queue: Queue object @type job: L{_QueuedJob} @param job: Job object @type opctx: L{_OpExecContext} @param opctx: Opcode execution context @rtype: bool @return: Whether opcode will be re-scheduled by dependency tracker """ op = opctx.op result = False while opctx.jobdeps: (dep_job_id, dep_status) = opctx.jobdeps[0] (depresult, depmsg) = queue.depmgr.CheckAndRegister(job, dep_job_id, dep_status) assert ht.TNonEmptyString(depmsg), "No dependency message" logging.info("%s: %s", opctx.log_prefix, depmsg) if depresult == _JobDependencyManager.CONTINUE: # Remove dependency and continue opctx.jobdeps.pop(0) elif depresult == _JobDependencyManager.WAIT: # Need to wait for notification, dependency tracker will re-add job # to workerpool result = True break elif depresult == _JobDependencyManager.CANCEL: # Job was cancelled, cancel this job as well job.Cancel() assert op.status == constants.OP_STATUS_CANCELING break elif depresult in (_JobDependencyManager.WRONGSTATUS, _JobDependencyManager.ERROR): # Job failed or there was an error, this job must fail op.status = constants.OP_STATUS_ERROR op.result = _EncodeOpError(errors.OpExecError(depmsg)) break else: raise errors.ProgrammerError("Unknown dependency result '%s'" % depresult) return result def _ExecOpCodeUnlocked(self, opctx): """Processes one opcode and returns the result. 
""" op = opctx.op assert op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING) # The very last check if the job was cancelled before trying to execute if op.status == constants.OP_STATUS_CANCELING: return (constants.OP_STATUS_CANCELING, None) timeout = opctx.GetNextLockTimeout() try: # Make sure not to hold queue lock while calling ExecOpCode result = self.opexec_fn(op.input, _OpExecCallbacks(self.queue, self.job, op), timeout=timeout) except mcpu.LockAcquireTimeout: assert timeout is not None, "Received timeout for blocking acquire" logging.debug("Couldn't acquire locks in %0.6fs", timeout) assert op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING) # Was job cancelled while we were waiting for the lock? if op.status == constants.OP_STATUS_CANCELING: return (constants.OP_STATUS_CANCELING, None) # Stay in waitlock while trying to re-acquire lock return (constants.OP_STATUS_WAITING, None) except CancelJob: logging.exception("%s: Canceling job", opctx.log_prefix) assert op.status == constants.OP_STATUS_CANCELING return (constants.OP_STATUS_CANCELING, None) except Exception, err: # pylint: disable=W0703 logging.exception("%s: Caught exception in %s", opctx.log_prefix, opctx.summary) return (constants.OP_STATUS_ERROR, _EncodeOpError(err)) else: logging.debug("%s: %s successful", opctx.log_prefix, opctx.summary) return (constants.OP_STATUS_SUCCESS, result) def __call__(self, _nextop_fn=None): """Continues execution of a job. @param _nextop_fn: Callback function for tests @return: C{FINISHED} if job is fully processed, C{DEFER} if the job should be deferred and C{WAITDEP} if the dependency manager (L{_JobDependencyManager}) will re-schedule the job when appropriate """ queue = self.queue job = self.job logging.debug("Processing job %s", job.id) queue.acquire(shared=1) try: opcount = len(job.ops) assert job.writable, "Expected writable job" # Don't do anything for finalized jobs if job.CalcStatus() in constants.JOBS_FINALIZED: return self.FINISHED # Is a previous opcode still pending? 
if job.cur_opctx: opctx = job.cur_opctx job.cur_opctx = None else: if __debug__ and _nextop_fn: _nextop_fn() opctx = self._FindNextOpcode(job, self._timeout_strategy_factory) op = opctx.op # Consistency check assert compat.all(i.status in (constants.OP_STATUS_QUEUED, constants.OP_STATUS_CANCELING) for i in job.ops[opctx.index + 1:]) assert op.status in (constants.OP_STATUS_QUEUED, constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING) assert (op.priority <= constants.OP_PRIO_LOWEST and op.priority >= constants.OP_PRIO_HIGHEST) waitjob = None if op.status != constants.OP_STATUS_CANCELING: assert op.status in (constants.OP_STATUS_QUEUED, constants.OP_STATUS_WAITING) # Prepare to start opcode if self._MarkWaitlock(job, op): # Write to disk queue.UpdateJobUnlocked(job) assert op.status == constants.OP_STATUS_WAITING assert job.CalcStatus() == constants.JOB_STATUS_WAITING assert job.start_timestamp and op.start_timestamp assert waitjob is None # Check if waiting for a job is necessary waitjob = self._CheckDependencies(queue, job, opctx) assert op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_CANCELING, constants.OP_STATUS_ERROR) if not (waitjob or op.status in (constants.OP_STATUS_CANCELING, constants.OP_STATUS_ERROR)): logging.info("%s: opcode %s waiting for locks", opctx.log_prefix, opctx.summary) assert not opctx.jobdeps, "Not all dependencies were removed" queue.release() try: (op_status, op_result) = self._ExecOpCodeUnlocked(opctx) finally: queue.acquire(shared=1) op.status = op_status op.result = op_result assert not waitjob if op.status in (constants.OP_STATUS_WAITING, constants.OP_STATUS_QUEUED): # waiting: Couldn't get locks in time # queued: Queue is shutting down assert not op.end_timestamp else: # Finalize opcode op.end_timestamp = TimeStampNow() if op.status == constants.OP_STATUS_CANCELING: assert not compat.any(i.status != constants.OP_STATUS_CANCELING for i in job.ops[opctx.index:]) else: assert op.status in constants.OPS_FINALIZED if op.status == constants.OP_STATUS_QUEUED: # Queue is shutting down assert not waitjob finalize = False # Reset context job.cur_opctx = None # In no case must the status be finalized here assert job.CalcStatus() == constants.JOB_STATUS_QUEUED elif op.status == constants.OP_STATUS_WAITING or waitjob: finalize = False if not waitjob and opctx.CheckPriorityIncrease(): # Priority was changed, need to update on-disk file queue.UpdateJobUnlocked(job) # Keep around for another round job.cur_opctx = opctx assert (op.priority <= constants.OP_PRIO_LOWEST and op.priority >= constants.OP_PRIO_HIGHEST) # In no case must the status be finalized here assert job.CalcStatus() == constants.JOB_STATUS_WAITING else: # Ensure all opcodes so far have been successful assert (opctx.index == 0 or compat.all(i.status == constants.OP_STATUS_SUCCESS for i in job.ops[:opctx.index])) # Reset context job.cur_opctx = None if op.status == constants.OP_STATUS_SUCCESS: finalize = False elif op.status == constants.OP_STATUS_ERROR: # If we get here, we cannot afford to check for any consistency # any more, we just want to clean up. # TODO: Actually, it wouldn't be a bad idea to start a timer # here to kill the whole process. 
to_encode = errors.OpExecError("Preceding opcode failed") job.MarkUnfinishedOps(constants.OP_STATUS_ERROR, _EncodeOpError(to_encode)) finalize = True elif op.status == constants.OP_STATUS_CANCELING: job.MarkUnfinishedOps(constants.OP_STATUS_CANCELED, "Job canceled by request") finalize = True else: raise errors.ProgrammerError("Unknown status '%s'" % op.status) if opctx.index == (opcount - 1): # Finalize on last opcode finalize = True if finalize: # All opcodes have been run, finalize job job.Finalize() # Write to disk. If the job status is final, this is the final write # allowed. Once the file has been written, it can be archived anytime. queue.UpdateJobUnlocked(job) assert not waitjob if finalize: logging.info("Finished job %s, status = %s", job.id, job.CalcStatus()) return self.FINISHED assert not waitjob or queue.depmgr.JobWaiting(job) if waitjob: return self.WAITDEP else: return self.DEFER finally: assert job.writable, "Job became read-only while being processed" queue.release() def _EvaluateJobProcessorResult(depmgr, job, result): """Looks at a result from L{_JobProcessor} for a job. To be used in a L{_JobQueueWorker}. """ if result == _JobProcessor.FINISHED: # Notify waiting jobs depmgr.NotifyWaiters(job.id) elif result == _JobProcessor.DEFER: # Schedule again raise workerpool.DeferTask(priority=job.CalcPriority()) elif result == _JobProcessor.WAITDEP: # No-op, dependency manager will re-schedule pass else: raise errors.ProgrammerError("Job processor returned unknown status %s" % (result, )) class _JobQueueWorker(workerpool.BaseWorker): """The actual job workers. """ def RunTask(self, job): # pylint: disable=W0221 """Job executor. @type job: L{_QueuedJob} @param job: the job to be processed """ assert job.writable, "Expected writable job" # Ensure only one worker is active on a single job. If a job registers for # a dependency job, and the other job notifies before the first worker is # done, the job can end up in the tasklist more than once. job.processor_lock.acquire() try: return self._RunTaskInner(job) finally: job.processor_lock.release() def _RunTaskInner(self, job): """Executes a job. Must be called with per-job lock acquired. """ queue = job.queue assert queue == self.pool.queue setname_fn = lambda op: self.SetTaskName(self._GetWorkerName(job, op)) setname_fn(None) proc = mcpu.Processor(queue.context, job.id) # Create wrapper for setting thread name wrap_execop_fn = compat.partial(self._WrapExecOpCode, setname_fn, proc.ExecOpCode) _EvaluateJobProcessorResult(queue.depmgr, job, _JobProcessor(queue, wrap_execop_fn, job)()) @staticmethod def _WrapExecOpCode(setname_fn, execop_fn, op, *args, **kwargs): """Updates the worker thread name to include a short summary of the opcode. @param setname_fn: Callable setting worker thread name @param execop_fn: Callable for executing opcode (usually L{mcpu.Processor.ExecOpCode}) """ setname_fn(op) try: return execop_fn(op, *args, **kwargs) finally: setname_fn(None) @staticmethod def _GetWorkerName(job, op): """Sets the worker thread name. @type job: L{_QueuedJob} @type op: L{opcodes.OpCode} """ parts = ["Job%s" % job.id] if op: parts.append(op.TinySummary()) return "/".join(parts) class _JobQueueWorkerPool(workerpool.WorkerPool): """Simple class implementing a job-processing workerpool. """ def __init__(self, queue): super(_JobQueueWorkerPool, self).__init__("Jq", JOBQUEUE_THREADS, _JobQueueWorker) self.queue = queue class _JobDependencyManager: """Keeps track of job dependencies. 
""" (WAIT, ERROR, CANCEL, CONTINUE, WRONGSTATUS) = range(1, 6) def __init__(self, getstatus_fn, enqueue_fn): """Initializes this class. """ self._getstatus_fn = getstatus_fn self._enqueue_fn = enqueue_fn self._waiters = {} self._lock = locking.SharedLock("JobDepMgr") @locking.ssynchronized(_LOCK, shared=1) def GetLockInfo(self, requested): # pylint: disable=W0613 """Retrieves information about waiting jobs. @type requested: set @param requested: Requested information, see C{query.LQ_*} """ # No need to sort here, that's being done by the lock manager and query # library. There are no priorities for notifying jobs, hence all show up as # one item under "pending". return [("job/%s" % job_id, None, None, [("job", [job.id for job in waiters])]) for job_id, waiters in self._waiters.items() if waiters] @locking.ssynchronized(_LOCK, shared=1) def JobWaiting(self, job): """Checks if a job is waiting. """ return compat.any(job in jobs for jobs in self._waiters.values()) @locking.ssynchronized(_LOCK) def CheckAndRegister(self, job, dep_job_id, dep_status): """Checks if a dependency job has the requested status. If the other job is not yet in a finalized status, the calling job will be notified (re-added to the workerpool) at a later point. @type job: L{_QueuedJob} @param job: Job object @type dep_job_id: int @param dep_job_id: ID of dependency job @type dep_status: list @param dep_status: Required status """ assert ht.TJobId(job.id) assert ht.TJobId(dep_job_id) assert ht.TListOf(ht.TElemOf(constants.JOBS_FINALIZED))(dep_status) if job.id == dep_job_id: return (self.ERROR, "Job can't depend on itself") # Get status of dependency job try: status = self._getstatus_fn(dep_job_id) except errors.JobLost, err: return (self.ERROR, "Dependency error: %s" % err) assert status in constants.JOB_STATUS_ALL job_id_waiters = self._waiters.setdefault(dep_job_id, set()) if status not in constants.JOBS_FINALIZED: # Register for notification and wait for job to finish job_id_waiters.add(job) return (self.WAIT, "Need to wait for job %s, wanted status '%s'" % (dep_job_id, dep_status)) # Remove from waiters list if job in job_id_waiters: job_id_waiters.remove(job) if (status == constants.JOB_STATUS_CANCELED and constants.JOB_STATUS_CANCELED not in dep_status): return (self.CANCEL, "Dependency job %s was cancelled" % dep_job_id) elif not dep_status or status in dep_status: return (self.CONTINUE, "Dependency job %s finished with status '%s'" % (dep_job_id, status)) else: return (self.WRONGSTATUS, "Dependency job %s finished with status '%s'," " not one of '%s' as required" % (dep_job_id, status, utils.CommaJoin(dep_status))) def _RemoveEmptyWaitersUnlocked(self): """Remove all jobs without actual waiters. """ for job_id in [job_id for (job_id, waiters) in self._waiters.items() if not waiters]: del self._waiters[job_id] def NotifyWaiters(self, job_id): """Notifies all jobs waiting for a certain job ID. @attention: Do not call until L{CheckAndRegister} returned a status other than C{WAITDEP} for C{job_id}, or behaviour is undefined @type job_id: int @param job_id: Job ID """ assert ht.TJobId(job_id) self._lock.acquire() try: self._RemoveEmptyWaitersUnlocked() jobs = self._waiters.pop(job_id, None) finally: self._lock.release() if jobs: # Re-add jobs to workerpool logging.debug("Re-adding %s jobs which were waiting for job %s", len(jobs), job_id) self._enqueue_fn(jobs) class JobQueue(object): """Queue used to manage the jobs. """ def __init__(self, context, cfg): """Constructor for JobQueue. 
The constructor will initialize the job queue object and then start loading the current jobs from disk, either for starting them (if they were queue) or for aborting them (if they were already running). @type context: GanetiContext @param context: the context object for access to the configuration data and other ganeti objects """ self.primary_jid = None self.context = context self._memcache = weakref.WeakValueDictionary() self._my_hostname = netutils.Hostname.GetSysName() # The Big JobQueue lock. If a code block or method acquires it in shared # mode safe it must guarantee concurrency with all the code acquiring it in # shared mode, including itself. In order not to acquire it at all # concurrency must be guaranteed with all code acquiring it in shared mode # and all code acquiring it exclusively. self._lock = locking.SharedLock("JobQueue") self.acquire = self._lock.acquire self.release = self._lock.release # Read serial file self._last_serial = jstore.ReadSerial() assert self._last_serial is not None, ("Serial file was modified between" " check in jstore and here") # Get initial list of nodes self._nodes = dict((n.name, n.primary_ip) for n in cfg.GetAllNodesInfo().values() if n.master_candidate) # Remove master node self._nodes.pop(self._my_hostname, None) # TODO: Check consistency across nodes self._queue_size = None self._UpdateQueueSizeUnlocked() assert ht.TInt(self._queue_size) # Job dependencies self.depmgr = _JobDependencyManager(self._GetJobStatusForDependencies, self._EnqueueJobs) # Setup worker pool self._wpool = _JobQueueWorkerPool(self) def _PickupJobUnlocked(self, job_id): """Load a job from the job queue Pick up a job that already is in the job queue and start/resume it. """ if self.primary_jid: logging.warning("Job process asked to pick up %s, but already has %s", job_id, self.primary_jid) self.primary_jid = int(job_id) job = self._LoadJobUnlocked(job_id) if job is None: logging.warning("Job %s could not be read", job_id) return job.AddReasons(pickup=True) status = job.CalcStatus() if status == constants.JOB_STATUS_QUEUED: job.SetPid(os.getpid()) self._EnqueueJobsUnlocked([job]) logging.info("Restarting job %s", job.id) elif status in (constants.JOB_STATUS_RUNNING, constants.JOB_STATUS_WAITING, constants.JOB_STATUS_CANCELING): logging.warning("Unfinished job %s found: %s", job.id, job) if status == constants.JOB_STATUS_WAITING: job.MarkUnfinishedOps(constants.OP_STATUS_QUEUED, None) job.SetPid(os.getpid()) self._EnqueueJobsUnlocked([job]) logging.info("Restarting job %s", job.id) else: to_encode = errors.OpExecError("Unclean master daemon shutdown") job.MarkUnfinishedOps(constants.OP_STATUS_ERROR, _EncodeOpError(to_encode)) job.Finalize() self.UpdateJobUnlocked(job) @locking.ssynchronized(_LOCK) def PickupJob(self, job_id): self._PickupJobUnlocked(job_id) def _GetRpc(self, address_list): """Gets RPC runner with context. """ return rpc.JobQueueRunner(self.context, address_list) @locking.ssynchronized(_LOCK) def AddNode(self, node): """Register a new node with the queue. 
@type node: L{objects.Node} @param node: the node object to be added """ node_name = node.name assert node_name != self._my_hostname # Clean queue directory on added node result = self._GetRpc(None).call_jobqueue_purge(node_name) msg = result.fail_msg if msg: logging.warning("Cannot cleanup queue directory on node %s: %s", node_name, msg) if not node.master_candidate: # remove if existing, ignoring errors self._nodes.pop(node_name, None) # and skip the replication of the job ids return # Upload the whole queue excluding archived jobs files = [self._GetJobPath(job_id) for job_id in self._GetJobIDsUnlocked()] # Upload current serial file files.append(pathutils.JOB_QUEUE_SERIAL_FILE) # Static address list addrs = [node.primary_ip] for file_name in files: # Read file content content = utils.ReadFile(file_name) result = _CallJqUpdate(self._GetRpc(addrs), [node_name], file_name, content) msg = result[node_name].fail_msg if msg: logging.error("Failed to upload file %s to node %s: %s", file_name, node_name, msg) msg = result[node_name].fail_msg if msg: logging.error("Failed to set queue drained flag on node %s: %s", node_name, msg) self._nodes[node_name] = node.primary_ip @locking.ssynchronized(_LOCK) def RemoveNode(self, node_name): """Callback called when removing nodes from the cluster. @type node_name: str @param node_name: the name of the node to remove """ self._nodes.pop(node_name, None) @staticmethod def _CheckRpcResult(result, nodes, failmsg): """Verifies the status of an RPC call. Since we aim to keep consistency should this node (the current master) fail, we will log errors if our rpc fail, and especially log the case when more than half of the nodes fails. @param result: the data as returned from the rpc call @type nodes: list @param nodes: the list of nodes we made the call to @type failmsg: str @param failmsg: the identifier to be used for logging """ failed = [] success = [] for node in nodes: msg = result[node].fail_msg if msg: failed.append(node) logging.error("RPC call %s (%s) failed on node %s: %s", result[node].call, failmsg, node, msg) else: success.append(node) # +1 for the master node if (len(success) + 1) < len(failed): # TODO: Handle failing nodes logging.error("More than half of the nodes failed") def _GetNodeIp(self): """Helper for returning the node name/ip list. @rtype: (list, list) @return: a tuple of two lists, the first one with the node names and the second one with the node addresses """ # TODO: Change to "tuple(map(list, zip(*self._nodes.items())))"? name_list = self._nodes.keys() addr_list = [self._nodes[name] for name in name_list] return name_list, addr_list def _UpdateJobQueueFile(self, file_name, data, replicate): """Writes a file locally and then replicates it to all nodes. This function will replace the contents of a file on the local node and then replicate it to all the other nodes we have. @type file_name: str @param file_name: the path of the file to be replicated @type data: str @param data: the new contents of the file @type replicate: boolean @param replicate: whether to spread the changes to the remote nodes """ getents = runtime.GetEnts() utils.WriteFile(file_name, data=data, uid=getents.masterd_uid, gid=getents.daemons_gid, mode=constants.JOB_QUEUE_FILES_PERMS) if replicate: names, addrs = self._GetNodeIp() result = _CallJqUpdate(self._GetRpc(addrs), names, file_name, data) self._CheckRpcResult(result, self._nodes, "Updating %s" % file_name) def _RenameFilesUnlocked(self, rename): """Renames a file locally and then replicate the change. 
This function will rename a file in the local queue directory and then replicate this rename to all the other nodes we have. @type rename: list of (old, new) @param rename: List containing tuples mapping old to new names """ # Rename them locally for old, new in rename: utils.RenameFile(old, new, mkdir=True) # ... and on all nodes names, addrs = self._GetNodeIp() result = self._GetRpc(addrs).call_jobqueue_rename(names, rename) self._CheckRpcResult(result, self._nodes, "Renaming files (%r)" % rename) @staticmethod def _GetJobPath(job_id): """Returns the job file for a given job id. @type job_id: str @param job_id: the job identifier @rtype: str @return: the path to the job file """ return utils.PathJoin(pathutils.QUEUE_DIR, "job-%s" % job_id) @staticmethod def _GetArchivedJobPath(job_id): """Returns the archived job file for a give job id. @type job_id: str @param job_id: the job identifier @rtype: str @return: the path to the archived job file """ return utils.PathJoin(pathutils.JOB_QUEUE_ARCHIVE_DIR, jstore.GetArchiveDirectory(job_id), "job-%s" % job_id) @staticmethod def _DetermineJobDirectories(archived): """Build list of directories containing job files. @type archived: bool @param archived: Whether to include directories for archived jobs @rtype: list """ result = [pathutils.QUEUE_DIR] if archived: archive_path = pathutils.JOB_QUEUE_ARCHIVE_DIR result.extend(map(compat.partial(utils.PathJoin, archive_path), utils.ListVisibleFiles(archive_path))) return result @classmethod def _GetJobIDsUnlocked(cls, sort=True, archived=False): """Return all known job IDs. The method only looks at disk because it's a requirement that all jobs are present on disk (so in the _memcache we don't have any extra IDs). @type sort: boolean @param sort: perform sorting on the returned job ids @rtype: list @return: the list of job IDs """ jlist = [] for path in cls._DetermineJobDirectories(archived): for filename in utils.ListVisibleFiles(path): m = constants.JOB_FILE_RE.match(filename) if m: jlist.append(int(m.group(1))) if sort: jlist.sort() return jlist def _LoadJobUnlocked(self, job_id): """Loads a job from the disk or memory. Given a job id, this will return the cached job object if existing, or try to load the job from the disk. If loading from disk, it will also add the job to the cache. @type job_id: int @param job_id: the job id @rtype: L{_QueuedJob} or None @return: either None or the job object """ assert isinstance(job_id, int), "Job queue: Supplied job id is not an int!" job = self._memcache.get(job_id, None) if job: logging.debug("Found job %s in memcache", job_id) assert job.writable, "Found read-only job in memcache" return job try: job = self._LoadJobFromDisk(job_id, False) if job is None: return job except errors.JobFileCorrupted: old_path = self._GetJobPath(job_id) new_path = self._GetArchivedJobPath(job_id) if old_path == new_path: # job already archived (future case) logging.exception("Can't parse job %s", job_id) else: # non-archived case logging.exception("Can't parse job %s, will archive.", job_id) self._RenameFilesUnlocked([(old_path, new_path)]) return None assert job.writable, "Job just loaded is not writable" self._memcache[job_id] = job logging.debug("Added job %s to the cache", job_id) return job def _LoadJobFromDisk(self, job_id, try_archived, writable=None): """Load the given job file from disk. Given a job file, read, load and restore it in a _QueuedJob format. 
@type job_id: int @param job_id: job identifier @type try_archived: bool @param try_archived: Whether to try loading an archived job @rtype: L{_QueuedJob} or None @return: either None or the job object """ path_functions = [(self._GetJobPath, False)] if try_archived: path_functions.append((self._GetArchivedJobPath, True)) raw_data = None archived = None for (fn, archived) in path_functions: filepath = fn(job_id) logging.debug("Loading job from %s", filepath) try: raw_data = utils.ReadFile(filepath) except EnvironmentError, err: if err.errno != errno.ENOENT: raise else: break if not raw_data: logging.debug("No data available for job %s", job_id) if int(job_id) == self.primary_jid: logging.warning("My own job file (%s) disappeared;" " this should only happy at cluster desctruction", job_id) if mcpu.lusExecuting[0] == 0: logging.warning("Not in execution; cleaning up myself due to missing" " job file") logging.shutdown() os._exit(1) # pylint: disable=W0212 return None if writable is None: writable = not archived try: data = serializer.LoadJson(raw_data) job = _QueuedJob.Restore(self, data, writable, archived) except Exception, err: # pylint: disable=W0703 raise errors.JobFileCorrupted(err) return job def SafeLoadJobFromDisk(self, job_id, try_archived, writable=None): """Load the given job file from disk. Given a job file, read, load and restore it in a _QueuedJob format. In case of error reading the job, it gets returned as None, and the exception is logged. @type job_id: int @param job_id: job identifier @type try_archived: bool @param try_archived: Whether to try loading an archived job @rtype: L{_QueuedJob} or None @return: either None or the job object """ try: return self._LoadJobFromDisk(job_id, try_archived, writable=writable) except (errors.JobFileCorrupted, EnvironmentError): logging.exception("Can't load/parse job %s", job_id) return None def _UpdateQueueSizeUnlocked(self): """Update the queue size. """ self._queue_size = len(self._GetJobIDsUnlocked(sort=False)) @classmethod def SubmitManyJobs(cls, jobs): """Create and store multiple jobs. """ return luxi.Client(address=pathutils.QUERY_SOCKET).SubmitManyJobs(jobs) @staticmethod def _FormatSubmitError(msg, ops): """Formats errors which occurred while submitting a job. """ return ("%s; opcodes %s" % (msg, utils.CommaJoin(op.Summary() for op in ops))) @staticmethod def _ResolveJobDependencies(resolve_fn, deps): """Resolves relative job IDs in dependencies. @type resolve_fn: callable @param resolve_fn: Function to resolve a relative job ID @type deps: list @param deps: Dependencies @rtype: tuple; (boolean, string or list) @return: If successful (first tuple item), the returned list contains resolved job IDs along with the requested status; if not successful, the second element is an error message """ result = [] for (dep_job_id, dep_status) in deps: if ht.TRelativeJobId(dep_job_id): assert ht.TInt(dep_job_id) and dep_job_id < 0 try: job_id = resolve_fn(dep_job_id) except IndexError: # Abort return (False, "Unable to resolve relative job ID %s" % dep_job_id) else: job_id = dep_job_id result.append((job_id, dep_status)) return (True, result) @locking.ssynchronized(_LOCK) def _EnqueueJobs(self, jobs): """Helper function to add jobs to worker pool's queue. @type jobs: list @param jobs: List of all jobs """ return self._EnqueueJobsUnlocked(jobs) def _EnqueueJobsUnlocked(self, jobs): """Helper function to add jobs to worker pool's queue. 
@type jobs: list @param jobs: List of all jobs """ assert self._lock.is_owned(shared=0), "Must own lock in exclusive mode" self._wpool.AddManyTasks([(job, ) for job in jobs], priority=[job.CalcPriority() for job in jobs], task_id=map(_GetIdAttr, jobs)) def _GetJobStatusForDependencies(self, job_id): """Gets the status of a job for dependencies. @type job_id: int @param job_id: Job ID @raise errors.JobLost: If job can't be found """ # Not using in-memory cache as doing so would require an exclusive lock # Try to load from disk job = self.SafeLoadJobFromDisk(job_id, True, writable=False) if job: assert not job.writable, "Got writable job" # pylint: disable=E1101 if job: return job.CalcStatus() raise errors.JobLost("Job %s not found" % job_id) def UpdateJobUnlocked(self, job, replicate=True): """Update a job's on disk storage. After a job has been modified, this function needs to be called in order to write the changes to disk and replicate them to the other nodes. @type job: L{_QueuedJob} @param job: the changed job @type replicate: boolean @param replicate: whether to replicate the change to remote nodes """ if __debug__: finalized = job.CalcStatus() in constants.JOBS_FINALIZED assert (finalized ^ (job.end_timestamp is None)) assert job.writable, "Can't update read-only job" assert not job.archived, "Can't update archived job" filename = self._GetJobPath(job.id) data = serializer.DumpJson(job.Serialize()) logging.debug("Writing job %s to %s", job.id, filename) self._UpdateJobQueueFile(filename, data, replicate) def HasJobBeenFinalized(self, job_id): """Checks if a job has been finalized. @type job_id: int @param job_id: Job identifier @rtype: boolean @return: True if the job has been finalized, False if the timeout has been reached, None if the job doesn't exist """ job = self.SafeLoadJobFromDisk(job_id, True, writable=False) if job is not None: return job.CalcStatus() in constants.JOBS_FINALIZED elif cluster.LUClusterDestroy.clusterHasBeenDestroyed: # FIXME: The above variable is a temporary workaround until the Python job # queue is completely removed. When removing the job queue, also remove # the variable from LUClusterDestroy. return True else: return None @locking.ssynchronized(_LOCK) def CancelJob(self, job_id): """Cancels a job. This will only succeed if the job has not started yet. @type job_id: int @param job_id: job ID of job to be cancelled. """ logging.info("Cancelling job %s", job_id) return self._ModifyJobUnlocked(job_id, lambda job: job.Cancel()) @locking.ssynchronized(_LOCK) def ChangeJobPriority(self, job_id, priority): """Changes a job's priority. @type job_id: int @param job_id: ID of the job whose priority should be changed @type priority: int @param priority: New priority """ logging.info("Changing priority of job %s to %s", job_id, priority) if priority not in constants.OP_PRIO_SUBMIT_VALID: allowed = utils.CommaJoin(constants.OP_PRIO_SUBMIT_VALID) raise errors.GenericError("Invalid priority %s, allowed are %s" % (priority, allowed)) def fn(job): (success, msg) = job.ChangePriority(priority) if success: try: self._wpool.ChangeTaskPriority(job.id, job.CalcPriority()) except workerpool.NoSuchTask: logging.debug("Job %s is not in workerpool at this time", job.id) return (success, msg) return self._ModifyJobUnlocked(job_id, fn) def _ModifyJobUnlocked(self, job_id, mod_fn): """Modifies a job. 
@type job_id: int @param job_id: Job ID @type mod_fn: callable @param mod_fn: Modifying function, receiving job object as parameter, returning tuple of (status boolean, message string) """ job = self._LoadJobUnlocked(job_id) if not job: logging.debug("Job %s not found", job_id) return (False, "Job %s not found" % job_id) assert job.writable, "Can't modify read-only job" assert not job.archived, "Can't modify archived job" (success, msg) = mod_fn(job) if success: # If the job was finalized (e.g. cancelled), this is the final write # allowed. The job can be archived anytime. self.UpdateJobUnlocked(job) return (success, msg) def _ArchiveJobsUnlocked(self, jobs): """Archives jobs. @type jobs: list of L{_QueuedJob} @param jobs: Job objects @rtype: int @return: Number of archived jobs """ archive_jobs = [] rename_files = [] for job in jobs: assert job.writable, "Can't archive read-only job" assert not job.archived, "Can't cancel archived job" if job.CalcStatus() not in constants.JOBS_FINALIZED: logging.debug("Job %s is not yet done", job.id) continue archive_jobs.append(job) old = self._GetJobPath(job.id) new = self._GetArchivedJobPath(job.id) rename_files.append((old, new)) # TODO: What if 1..n files fail to rename? self._RenameFilesUnlocked(rename_files) logging.debug("Successfully archived job(s) %s", utils.CommaJoin(job.id for job in archive_jobs)) # Since we haven't quite checked, above, if we succeeded or failed renaming # the files, we update the cached queue size from the filesystem. When we # get around to fix the TODO: above, we can use the number of actually # archived jobs to fix this. self._UpdateQueueSizeUnlocked() return len(archive_jobs) def _Query(self, fields, qfilter): qobj = query.Query(query.JOB_FIELDS, fields, qfilter=qfilter, namefield="id") # Archived jobs are only looked at if the "archived" field is referenced # either as a requested field or in the filter. By default archived jobs # are ignored. include_archived = (query.JQ_ARCHIVED in qobj.RequestedData()) job_ids = qobj.RequestedNames() list_all = (job_ids is None) if list_all: # Since files are added to/removed from the queue atomically, there's no # risk of getting the job ids in an inconsistent state. job_ids = self._GetJobIDsUnlocked(archived=include_archived) jobs = [] for job_id in job_ids: job = self.SafeLoadJobFromDisk(job_id, True, writable=False) if job is not None or not list_all: jobs.append((job_id, job)) return (qobj, jobs, list_all) def QueryJobs(self, fields, qfilter): """Returns a list of jobs in queue. @type fields: sequence @param fields: List of wanted fields @type qfilter: None or query2 filter (list) @param qfilter: Query filter """ (qobj, ctx, _) = self._Query(fields, qfilter) return query.GetQueryResponse(qobj, ctx, sort_by_name=False) def OldStyleQueryJobs(self, job_ids, fields): """Returns a list of jobs in queue. @type job_ids: list @param job_ids: sequence of job identifiers or None for all @type fields: list @param fields: names of fields to return @rtype: list @return: list one element per job, each element being list with the requested fields """ # backwards compat: job_ids = [int(jid) for jid in job_ids] qfilter = qlang.MakeSimpleFilter("id", job_ids) (qobj, ctx, _) = self._Query(fields, qfilter) return qobj.OldStyleQuery(ctx, sort_by_name=False) @locking.ssynchronized(_LOCK) def PrepareShutdown(self): """Prepare to stop the job queue. Returns whether there are any jobs currently running. If the latter is the case, the job queue is not yet ready for shutdown. 
    Once this function returns C{True} L{Shutdown} can be called without
    interfering with any job.

    @rtype: bool
    @return: Whether there are any running jobs

    """
    return self._wpool.HasRunningTasks()

  @locking.ssynchronized(_LOCK)
  def Shutdown(self):
    """Stops the job queue.

    This shuts down all the worker threads and closes the queue.

    """
    self._wpool.TerminateWorkers()
{ "content_hash": "6f1cec181137b799ec066a345fca2921", "timestamp": "", "source": "github", "line_count": 2146, "max_line_length": 80, "avg_line_length": 30.088536812674743, "alnum_prop": 0.6388725414279077, "repo_name": "dimara/ganeti", "id": "2b90d00d018db1af7e74c3264516d050eb149b63", "size": "65954", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/jqueue/__init__.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Haskell", "bytes": "2409763" }, { "name": "Python", "bytes": "5842471" }, { "name": "Shell", "bytes": "110549" } ], "symlink_target": "" }
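The record above is ganeti's job-queue module. One detail worth illustrating is how _OpExecContext.CheckPriorityIncrease escalates an opcode each time its lock-acquire retry budget runs out: "increasing priority" means decrementing the numeric value toward OP_PRIO_HIGHEST, then resetting the timeout strategy. The sketch below is a minimal, self-contained model of that loop; the constant value and the per-level retry budget of 3 are illustrative assumptions, not ganeti's actual configuration.

# Minimal sketch of the priority-escalation loop modeled on
# _OpExecContext.CheckPriorityIncrease above; names and values are
# illustrative stand-ins, not ganeti's real API.

OP_PRIO_HIGHEST = -20  # assumed: numerically smaller means higher priority


class EscalatingOp(object):
    def __init__(self, priority=0, attempts_per_level=3):
        self.priority = priority
        self._attempts_per_level = attempts_per_level
        self._attempts_left = attempts_per_level

    def on_lock_timeout(self):
        """Called each time a lock-acquire attempt times out."""
        self._attempts_left -= 1
        if self._attempts_left > 0:
            return  # retry at the same priority
        if self.priority > OP_PRIO_HIGHEST:
            # "Increasing priority" decrements the number, exactly like
            # op.priority -= 1 in CheckPriorityIncrease.
            self.priority -= 1
        # Mirrors _ResetTimeoutStrategy(): start a fresh retry budget.
        self._attempts_left = self._attempts_per_level


op = EscalatingOp()
for _ in range(9):
    op.on_lock_timeout()
assert op.priority == -3  # one escalation per exhausted retry budget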
""" Simple peformance tests. """ import sys import time import couchdb def main(): print 'sys.version : %r' % (sys.version,) print 'sys.platform : %r' % (sys.platform,) tests = [create_doc, create_bulk_docs] if len(sys.argv) > 1: tests = [test for test in tests if test.__name__ in sys.argv[1:]] server = couchdb.Server() for test in tests: _run(server, test) def _run(server, func): """Run a test in a clean db and log its execution time.""" sys.stdout.write("* [%s] %s ... " % (func.__name__, func.__doc__.strip())) sys.stdout.flush() db_name = 'couchdb-python/perftest' db = server.create(db_name) try: try: start = time.time() func(db) stop = time.time() sys.stdout.write("%0.2fs\n" % (stop - start,)) sys.stdout.flush() except Exception as e: sys.stdout.write("FAILED - %r\n" % (unicode(e),)) sys.stdout.flush() finally: server.delete(db_name) def create_doc(db): """Create lots of docs, one at a time""" for i in range(1000): db.save({'_id': unicode(i)}) def create_bulk_docs(db): """Create lots of docs, lots at a time""" batch_size = 100 num_batches = 1000 for i in range(num_batches): db.update([{'_id': unicode((i * batch_size) + j)} for j in range(batch_size)]) if __name__ == '__main__': main()
{ "content_hash": "55a77b3bc4416f02ab1b9f0a2ec9f185", "timestamp": "", "source": "github", "line_count": 60, "max_line_length": 86, "avg_line_length": 24.033333333333335, "alnum_prop": 0.5471567267683772, "repo_name": "kxepal/couchdb-python", "id": "bac2af7d5d3aeaaf57e970b53b46df3e5e828750", "size": "1442", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "perftest.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
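perftest.py above contrasts saving 1000 documents one at a time with 100-document bulk updates. The sketch below reuses that timing-harness pattern against an in-memory stand-in so it runs without a CouchDB server; FakeDB and its methods are hypothetical and only mirror the save()/update() calls used above, so it demonstrates the harness rather than the real round-trip savings of bulk updates.

import time


class FakeDB(object):
    """Hypothetical in-memory stand-in mirroring save()/update() above."""
    def __init__(self):
        self.docs = {}

    def save(self, doc):
        self.docs[doc['_id']] = doc

    def update(self, docs):
        for doc in docs:
            self.docs[doc['_id']] = doc


def timed(func):
    """Return func's wall-clock runtime, like _run() does above."""
    start = time.time()
    func()
    return time.time() - start


db = FakeDB()
one_by_one = timed(lambda: [db.save({'_id': str(i)}) for i in range(1000)])
batched = timed(lambda: [db.update([{'_id': str(i * 100 + j)}
                                    for j in range(100)])
                         for i in range(10)])
print('1000 docs: %.4fs individually, %.4fs in batches of 100'
      % (one_by_one, batched))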
import datetime
import unittest

from pamqp import decode, encode


class EncodeDecodeTests(unittest.TestCase):
    def test_encode_decode_field_table_long_keys(self):
        """Encoding and decoding a field_table with too long keys."""
        # second key is 128 A's + \N{PILE OF POO}
        data = {'A' * 256: 1,
                ((b'A' * 128) + b'\xf0\x9f\x92\xa9').decode('utf-8'): 2}
        encoded = encode.field_table(data)
        decoded = decode.field_table(encoded)[1]
        self.assertIn('A' * 128, decoded)

    def test_timestamp_with_dst(self):
        # this test assumes the system is set up using a northern hemisphere
        # timezone with DST (America/New_York as per GitHub CI is fine)
        data = datetime.datetime(2006, 5, 21, 16, 30, 10)
        encoded = encode.timestamp(data)
        decoded = decode.timestamp(encoded)[1]
        self.assertEqual(decoded, data)
{ "content_hash": "f575fed891814ae16bf5f9e6ebadf852", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 76, "avg_line_length": 37.541666666666664, "alnum_prop": 0.6315205327413984, "repo_name": "gmr/pamqp", "id": "a8d4bbd74bd163782f51e4c10c9733f4a5310746", "size": "927", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "tests/test_encode_decode.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "406824" } ], "symlink_target": "" }
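The pamqp tests above drive values through encode.field_table()/decode.field_table() and back. A minimal round-trip using only the calls the tests themselves use follows; treating element [0] of the decode result as the number of bytes consumed is an inference from the tests' [1] indexing, not documented API here.

from pamqp import decode, encode

# Round-trip a small field table the same way the tests above do.
table = {'queue': 'events', 'retries': 3}
wire = encode.field_table(table)
consumed, value = decode.field_table(wire)  # [0] assumed: bytes consumed
assert value == table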
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from future import standard_library standard_library.install_aliases() from builtins import * # noqa import os import logging import ycm_core from collections import defaultdict from ycmd.completers.general_completer import GeneralCompleter from ycmd import identifier_utils from ycmd.utils import ToCppStringCompatible, SplitLines from ycmd import responses SYNTAX_FILENAME = 'YCM_PLACEHOLDER_FOR_SYNTAX' class IdentifierCompleter( GeneralCompleter ): def __init__( self, user_options ): super( IdentifierCompleter, self ).__init__( user_options ) self._completer = ycm_core.IdentifierCompleter() self._tags_file_last_mtime = defaultdict( int ) self._logger = logging.getLogger( __name__ ) self._max_candidates = user_options[ 'max_num_identifier_candidates' ] def ShouldUseNow( self, request_data ): return self.QueryLengthAboveMinThreshold( request_data ) def ComputeCandidates( self, request_data ): if not self.ShouldUseNow( request_data ): return [] completions = self._completer.CandidatesForQueryAndType( ToCppStringCompatible( _SanitizeQuery( request_data[ 'query' ] ) ), ToCppStringCompatible( request_data[ 'filetypes' ][ 0 ] ) ) completions = completions[ : self._max_candidates ] completions = _RemoveSmallCandidates( completions, self.user_options[ 'min_num_identifier_candidate_chars' ] ) def ConvertCompletionData( x ): return responses.BuildCompletionData( insertion_text = x, extra_menu_info='[ID]' ) return [ ConvertCompletionData( x ) for x in completions ] def AddIdentifier( self, identifier, request_data ): try: filetype = request_data[ 'filetypes' ][ 0 ] except KeyError: filetype = None filepath = request_data[ 'filepath' ] if not filetype or not filepath or not identifier: return vector = ycm_core.StringVector() vector.append( ToCppStringCompatible( identifier ) ) self._logger.info( 'Adding ONE buffer identifier for file: %s', filepath ) self._completer.AddIdentifiersToDatabase( vector, ToCppStringCompatible( filetype ), ToCppStringCompatible( filepath ) ) def AddPreviousIdentifier( self, request_data ): self.AddIdentifier( _PreviousIdentifier( self.user_options[ 'min_num_of_chars_for_completion' ], request_data ), request_data ) def AddIdentifierUnderCursor( self, request_data ): cursor_identifier = _GetCursorIdentifier( request_data ) if not cursor_identifier: return self.AddIdentifier( cursor_identifier, request_data ) def AddBufferIdentifiers( self, request_data ): try: filetype = request_data[ 'filetypes' ][ 0 ] except KeyError: filetype = None filepath = request_data[ 'filepath' ] collect_from_comments_and_strings = bool( self.user_options[ 'collect_identifiers_from_comments_and_strings' ] ) if not filetype or not filepath: return text = request_data[ 'file_data' ][ filepath ][ 'contents' ] self._logger.info( 'Adding buffer identifiers for file: %s', filepath ) self._completer.ClearForFileAndAddIdentifiersToDatabase( _IdentifiersFromBuffer( text, filetype, collect_from_comments_and_strings ), ToCppStringCompatible( filetype ), ToCppStringCompatible( filepath ) ) def AddIdentifiersFromTagFiles( self, tag_files ): absolute_paths_to_tag_files = ycm_core.StringVector() for tag_file in tag_files: try: current_mtime = os.path.getmtime( tag_file ) except: continue last_mtime = self._tags_file_last_mtime[ tag_file ] # We don't want to repeatedly process the same file over and over; we only # process if it's changed since the last time we looked 
at it if current_mtime <= last_mtime: continue self._tags_file_last_mtime[ tag_file ] = current_mtime absolute_paths_to_tag_files.append( ToCppStringCompatible( tag_file ) ) if not absolute_paths_to_tag_files: return self._completer.AddIdentifiersToDatabaseFromTagFiles( absolute_paths_to_tag_files ) def AddIdentifiersFromSyntax( self, keyword_list, filetypes ): keyword_vector = ycm_core.StringVector() for keyword in keyword_list: keyword_vector.append( ToCppStringCompatible( keyword ) ) filepath = SYNTAX_FILENAME + filetypes[ 0 ] self._completer.AddIdentifiersToDatabase( keyword_vector, ToCppStringCompatible( filetypes[ 0 ] ), ToCppStringCompatible( filepath ) ) def OnFileReadyToParse( self, request_data ): self.AddBufferIdentifiers( request_data ) if 'tag_files' in request_data: self.AddIdentifiersFromTagFiles( request_data[ 'tag_files' ] ) if 'syntax_keywords' in request_data: self.AddIdentifiersFromSyntax( request_data[ 'syntax_keywords' ], request_data[ 'filetypes' ] ) def OnInsertLeave( self, request_data ): self.AddIdentifierUnderCursor( request_data ) def OnCurrentIdentifierFinished( self, request_data ): self.AddPreviousIdentifier( request_data ) # This looks for the previous identifier and returns it; this might mean looking # at last identifier on the previous line if a new line has just been created. def _PreviousIdentifier( min_num_candidate_size_chars, request_data ): def PreviousIdentifierOnLine( line, column ): nearest_ident = '' for match in identifier_utils.IdentifierRegexForFiletype( filetype ).finditer( line ): if match.end() <= column: nearest_ident = match.group() return nearest_ident line_num = request_data[ 'line_num' ] - 1 column_num = request_data[ 'column_codepoint' ] - 1 filepath = request_data[ 'filepath' ] try: filetype = request_data[ 'filetypes' ][ 0 ] except KeyError: filetype = None contents_per_line = ( SplitLines( request_data[ 'file_data' ][ filepath ][ 'contents' ] ) ) ident = PreviousIdentifierOnLine( contents_per_line[ line_num ], column_num ) if ident: if len( ident ) < min_num_candidate_size_chars: return '' return ident prev_line = contents_per_line[ line_num - 1 ] ident = PreviousIdentifierOnLine( prev_line, len( prev_line ) ) if len( ident ) < min_num_candidate_size_chars: return '' return ident def _RemoveSmallCandidates( candidates, min_num_candidate_size_chars ): if min_num_candidate_size_chars == 0: return candidates return [ x for x in candidates if len( x ) >= min_num_candidate_size_chars ] def _GetCursorIdentifier( request_data ): try: filetype = request_data[ 'filetypes' ][ 0 ] except KeyError: filetype = None return identifier_utils.IdentifierAtIndex( request_data[ 'line_value' ], request_data[ 'column_codepoint' ] - 1, filetype ) def _IdentifiersFromBuffer( text, filetype, collect_from_comments_and_strings ): if not collect_from_comments_and_strings: text = identifier_utils.RemoveIdentifierFreeText( text ) idents = identifier_utils.ExtractIdentifiersFromText( text, filetype ) vector = ycm_core.StringVector() for ident in idents: vector.append( ToCppStringCompatible( ident ) ) return vector def _SanitizeQuery( query ): return query.strip()
{ "content_hash": "bb1dec61babe9ce8f02793e726f4eacf", "timestamp": "", "source": "github", "line_count": 230, "max_line_length": 80, "avg_line_length": 32.92173913043478, "alnum_prop": 0.6835710512414157, "repo_name": "rfguri/vimfiles", "id": "742a3be60001a2554eb666ce5db6a9412fff8673", "size": "8255", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "bundle/ycm/third_party/ycmd/ycmd/completers/all/identifier_completer.py", "mode": "33261", "license": "mit", "language": [], "symlink_target": "" }
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import IntegrityError, models, transaction


class Migration(DataMigration):

    def forwards(self, orm):
        db.commit_transaction()
        try:
            self._forwards(orm)
        except Exception:
            # Explicitly resume the transaction because South is going to
            # try and roll it back, but when it can't find one, it'll error
            # itself, masking the actual exception being raised
            #
            # See https://github.com/getsentry/sentry/issues/5035
            db.start_transaction()
            raise
        db.start_transaction()

    def _forwards(self, orm):
        """Collapse duplicate Environment and ReleaseEnvironment rows onto
        the oldest row, repointing (or deleting) anything that references the
        duplicates. A standalone sketch of this dedupe-and-repoint pattern
        follows the frozen models definition below."""
        dupe_envs = orm.Environment.objects.values_list('name', 'organization_id')\
            .annotate(ecount=models.Count('id'))\
            .filter(ecount__gt=1)

        for name, organization_id in dupe_envs:
            envs = list(
                orm.Environment.objects.filter(
                    name=name,
                    organization_id=organization_id,
                ).order_by('date_added')
            )
            to_env = envs[0]
            from_envs = envs[1:]

            try:
                with transaction.atomic():
                    orm.EnvironmentProject.objects.filter(
                        environment__in=from_envs,
                    ).update(environment=to_env)
            except IntegrityError:
                # The bulk update collided with an existing row; retry one by
                # one and delete the rows that would violate the unique index.
                for ep in orm.EnvironmentProject.objects.filter(environment__in=from_envs):
                    try:
                        with transaction.atomic():
                            orm.EnvironmentProject.objects.filter(
                                id=ep.id,
                            ).update(environment=to_env)
                    except IntegrityError:
                        ep.delete()

            from_env_ids = [e.id for e in from_envs]
            try:
                with transaction.atomic():
                    orm.ReleaseEnvironment.objects.filter(
                        environment_id__in=from_env_ids,
                    ).update(environment_id=to_env.id)
            except IntegrityError:
                for re in orm.ReleaseEnvironment.objects.filter(environment_id__in=from_env_ids):
                    try:
                        with transaction.atomic():
                            orm.ReleaseEnvironment.objects.filter(
                                id=re.id,
                            ).update(environment_id=to_env.id)
                    except IntegrityError:
                        re.delete()

            orm.Environment.objects.filter(id__in=from_env_ids).delete()

        dupe_release_envs = orm.ReleaseEnvironment.objects.values(
            'release_id', 'organization_id', 'environment_id'
        ).annotate(recount=models.Count('id')).filter(recount__gt=1)

        for renv in dupe_release_envs:
            release_id = renv['release_id']
            organization_id = renv['organization_id']
            environment_id = renv['environment_id']
            renvs = list(
                orm.ReleaseEnvironment.objects.filter(
                    release_id=release_id,
                    organization_id=organization_id,
                    environment_id=environment_id,
                ).order_by('first_seen')
            )
            to_renv = renvs[0]
            from_renvs = renvs[1:]
            last_seen = max([re.last_seen for re in renvs])
            orm.ReleaseEnvironment.objects.filter(
                id=to_renv.id,
            ).update(last_seen=last_seen)
            orm.ReleaseEnvironment.objects.filter(
                id__in=[re.id for re in from_renvs],
            ).delete()

    def backwards(self, orm):
        """Irreversible data migration: deleted duplicates cannot be
        reconstructed, so there is no backwards step."""
models = { 'sentry.activity': { 'Meta': { 'object_name': 'Activity' }, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True' }), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True' } ) }, 'sentry.apikey': { 'Meta': { 'object_name': 'ApiKey' }, 'allowed_origins': ('django.db.models.fields.TextField', [], { 'null': 'True', 'blank': 'True' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32' }), 'label': ( 'django.db.models.fields.CharField', [], { 'default': "'Default'", 'max_length': '64', 'blank': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'key_set'", 'to': "orm['sentry.Organization']" } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.apitoken': { 'Meta': { 'object_name': 'ApiToken' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiKey']", 'null': 'True' } ), 'scopes': ('django.db.models.fields.BigIntegerField', [], { 'default': 'None' }), 'token': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '64' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.auditlogentry': { 'Meta': { 'object_name': 'AuditLogEntry' }, 'actor': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']" } ), 'actor_key': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True' } ), 'actor_label': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ip_address': ( 'django.db.models.fields.GenericIPAddressField', [], { 'max_length': '39', 'null': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'target_user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.authenticator': { 'Meta': { 'unique_together': "(('user', 'type'),)", 'object_name': 'Authenticator', 'db_table': "'auth_authenticator'" }, 'config': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}), 'created_at': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'last_used_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.authidentity': { 'Meta': { 'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity' }, 'auth_provider': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.AuthProvider']" } ), 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'last_synced': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'last_verified': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.authprovider': { 'Meta': { 'object_name': 'AuthProvider' }, 'config': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'default_global_access': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '50' }), 'default_teams': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True' } ), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_sync': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']", 'unique': 'True' } ), 'provider': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }) }, 'sentry.broadcast': { 'Meta': { 'object_name': 'Broadcast' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_expires': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime(2017, 3, 2, 0, 0)', 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', 
[], { 'default': 'True', 'db_index': 'True' }), 'link': ( 'django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True', 'blank': 'True' } ), 'message': ('django.db.models.fields.CharField', [], { 'max_length': '256' }), 'title': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'upstream_id': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True', 'blank': 'True' } ) }, 'sentry.broadcastseen': { 'Meta': { 'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen' }, 'broadcast': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Broadcast']" } ), 'date_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.commit': { 'Meta': { 'unique_together': "(('repository_id', 'key'),)", 'object_name': 'Commit', 'index_together': "(('repository_id', 'date_added'),)" }, 'author': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.CommitAuthor']", 'null': 'True' } ), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'message': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'repository_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.commitauthor': { 'Meta': { 'unique_together': "(('organization_id', 'email'),)", 'object_name': 'CommitAuthor' }, 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.commitfilechange': { 'Meta': { 'unique_together': "(('commit', 'filename'),)", 'object_name': 'CommitFileChange' }, 'commit': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Commit']" } ), 'filename': ('django.db.models.fields.CharField', [], { 'max_length': '255' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '1' }) }, 'sentry.counter': { 'Meta': { 'object_name': 'Counter', 'db_table': "'sentry_projectcounter'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'unique': 'True' } ), 'value': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.dsymbundle': { 'Meta': { 'object_name': 'DSymBundle' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': 
"orm['sentry.DSymObject']" } ), 'sdk': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymSDK']" } ) }, 'sentry.dsymobject': { 'Meta': { 'object_name': 'DSymObject' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object_path': ('django.db.models.fields.TextField', [], { 'db_index': 'True' }), 'uuid': ('django.db.models.fields.CharField', [], { 'max_length': '36', 'db_index': 'True' }), 'vmaddr': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'vmsize': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }) }, 'sentry.dsymsdk': { 'Meta': { 'object_name': 'DSymSDK', 'index_together': "[('version_major', 'version_minor', 'version_patchlevel', 'version_build')]" }, 'dsym_type': ('django.db.models.fields.CharField', [], { 'max_length': '20', 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'sdk_name': ('django.db.models.fields.CharField', [], { 'max_length': '20' }), 'version_build': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'version_major': ('django.db.models.fields.IntegerField', [], {}), 'version_minor': ('django.db.models.fields.IntegerField', [], {}), 'version_patchlevel': ('django.db.models.fields.IntegerField', [], {}) }, 'sentry.dsymsymbol': { 'Meta': { 'unique_together': "[('object', 'address')]", 'object_name': 'DSymSymbol' }, 'address': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.DSymObject']" } ), 'symbol': ('django.db.models.fields.TextField', [], {}) }, 'sentry.environment': { 'Meta': { 'unique_together': "(('project_id', 'name'),)", 'object_name': 'Environment' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'projects': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Project']", 'through': "orm['sentry.EnvironmentProject']", 'symmetrical': 'False' } ) }, 'sentry.environmentproject': { 'Meta': { 'unique_together': "(('project', 'environment'),)", 'object_name': 'EnvironmentProject' }, 'environment': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Environment']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.event': { 'Meta': { 'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group_id', 'datetime'),)" }, 'data': ('sentry.db.models.fields.node.NodeField', [], { 'null': 'True', 'blank': 'True' }), 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } 
), 'event_id': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True', 'db_column': "'message_id'" } ), 'group_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'message': ('django.db.models.fields.TextField', [], {}), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'null': 'True' }) }, 'sentry.eventmapping': { 'Meta': { 'unique_together': "(('project_id', 'event_id'),)", 'object_name': 'EventMapping' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventprocessingissue': { 'Meta': { 'unique_together': "(('raw_event', 'processing_issue'),)", 'object_name': 'EventProcessingIssue' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'processing_issue': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.ProcessingIssue']" } ), 'raw_event': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.RawEvent']" } ) }, 'sentry.eventtag': { 'Meta': { 'unique_together': "(('event_id', 'key_id', 'value_id'),)", 'object_name': 'EventTag', 'index_together': "(('project_id', 'key_id', 'value_id'), ('group_id', 'key_id', 'value_id'))" }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}), 'value_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.eventuser': { 'Meta': { 'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))" }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75', 'null': 'True' }), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '128', 'null': 'True' }), 'ip_address': ( 'django.db.models.fields.GenericIPAddressField', [], { 'max_length': '39', 'null': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'username': ('django.db.models.fields.CharField', [], { 
'max_length': '128', 'null': 'True' }) }, 'sentry.file': { 'Meta': { 'object_name': 'File' }, 'blob': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'legacy_blob'", 'null': 'True', 'to': "orm['sentry.FileBlob']" } ), 'blobs': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.FileBlob']", 'through': "orm['sentry.FileBlobIndex']", 'symmetrical': 'False' } ), 'checksum': ('django.db.models.fields.CharField', [], { 'max_length': '40', 'null': 'True' }), 'headers': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'path': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'timestamp': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '64' }) }, 'sentry.fileblob': { 'Meta': { 'object_name': 'FileBlob' }, 'checksum': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '40' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'path': ('django.db.models.fields.TextField', [], { 'null': 'True' }), 'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'timestamp': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ) }, 'sentry.fileblobindex': { 'Meta': { 'unique_together': "(('file', 'blob', 'offset'),)", 'object_name': 'FileBlobIndex' }, 'blob': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.FileBlob']" } ), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'offset': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}) }, 'sentry.globaldsymfile': { 'Meta': { 'object_name': 'GlobalDSymFile' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object_name': ('django.db.models.fields.TextField', [], {}), 'uuid': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '36' }) }, 'sentry.group': { 'Meta': { 'unique_together': "(('project', 'short_id'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)" }, 'active_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'db_index': 'True' }), 'culprit': ( 'django.db.models.fields.CharField', [], { 'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True' } ), 'data': ( 'sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True', 'blank': 'True' } ), 'first_release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']", 'null': 'True', 'on_delete': 'models.PROTECT' } ), 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'id': 
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_public': ( 'django.db.models.fields.NullBooleanField', [], { 'default': 'False', 'null': 'True', 'blank': 'True' } ), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'level': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '40', 'db_index': 'True', 'blank': 'True' } ), 'logger': ( 'django.db.models.fields.CharField', [], { 'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True' } ), 'message': ('django.db.models.fields.TextField', [], {}), 'num_comments': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'null': 'True' } ), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'resolved_at': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'db_index': 'True' }), 'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'short_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], { 'default': '0' }), 'times_seen': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '1', 'db_index': 'True' } ) }, 'sentry.groupassignee': { 'Meta': { 'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'" }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'assignee_set'", 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']" } ) }, 'sentry.groupbookmark': { 'Meta': { 'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']" } ) }, 'sentry.groupcommitresolution': { 'Meta': { 'unique_together': "(('group_id', 'commit_id'),)", 'object_name': 'GroupCommitResolution' }, 'commit_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 
'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }) }, 'sentry.groupemailthread': { 'Meta': { 'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread' }, 'date': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'msgid': ('django.db.models.fields.CharField', [], { 'max_length': '100' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']" } ) }, 'sentry.grouphash': { 'Meta': { 'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ) }, 'sentry.groupmeta': { 'Meta': { 'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'value': ('django.db.models.fields.TextField', [], {}) }, 'sentry.groupredirect': { 'Meta': { 'object_name': 'GroupRedirect' }, 'group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'db_index': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'previous_group_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'unique': 'True' }) }, 'sentry.grouprelease': { 'Meta': { 'unique_together': "(('group_id', 'release_id', 'environment'),)", 'object_name': 'GroupRelease' }, 'environment': ('django.db.models.fields.CharField', [], { 'default': "''", 'max_length': '64' }), 'first_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'release_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.groupresolution': { 'Meta': { 'object_name': 'GroupResolution' }, 'datetime': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'unique': 'True' } ), 
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.grouprulestatus': { 'Meta': { 'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_active': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'rule': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Rule']" } ), 'status': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }) }, 'sentry.groupseen': { 'Meta': { 'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'db_index': 'False' } ) }, 'sentry.groupsnooze': { 'Meta': { 'object_name': 'GroupSnooze' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'unique': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'until': ('django.db.models.fields.DateTimeField', [], {}) }, 'sentry.groupsubscription': { 'Meta': { 'unique_together': "(('group', 'user'),)", 'object_name': 'GroupSubscription' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'subscription_set'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'subscription_set'", 'to': "orm['sentry.Project']" } ), 'reason': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.grouptagkey': { 'Meta': { 'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey' }, 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.grouptagvalue': { 'Meta': { 'unique_together': "(('group', 'key', 'value'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'", 'index_together': "(('project', 'key', 'value', 'last_seen'),)" }, 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'grouptag'", 'to': "orm['sentry.Group']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']" } ), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'value': ('django.db.models.fields.CharField', [], { 'max_length': '200' }) }, 'sentry.lostpasswordhash': { 'Meta': { 'object_name': 'LostPasswordHash' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'hash': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'unique': 'True' } ) }, 'sentry.option': { 'Meta': { 'object_name': 'Option' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '64' }), 'last_updated': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}) }, 'sentry.organization': { 'Meta': { 'object_name': 'Organization' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'default_role': ('django.db.models.fields.CharField', [], { 'default': "'member'", 'max_length': '32' }), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '1' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'members': ( 'django.db.models.fields.related.ManyToManyField', [], { 'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']" } ), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'slug': ('django.db.models.fields.SlugField', [], { 'unique': 'True', 'max_length': '50' }), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.organizationaccessrequest': { 'Meta': { 'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'member': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.OrganizationMember']" } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 
'sentry.organizationavatar': { 'Meta': { 'object_name': 'OrganizationAvatar' }, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32', 'db_index': 'True' } ), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.Organization']" } ) }, 'sentry.organizationmember': { 'Meta': { 'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ( 'django.db.models.fields.EmailField', [], { 'max_length': '75', 'null': 'True', 'blank': 'True' } ), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0' }), 'has_global_access': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'member_set'", 'to': "orm['sentry.Organization']" } ), 'role': ('django.db.models.fields.CharField', [], { 'default': "'member'", 'max_length': '32' }), 'teams': ( 'django.db.models.fields.related.ManyToManyField', [], { 'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True' } ), 'token': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True' } ), 'type': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '50', 'blank': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.organizationmemberteam': { 'Meta': { 'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'" }, 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'organizationmember': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.OrganizationMember']" } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 'sentry.organizationonboardingtask': { 'Meta': { 'unique_together': "(('organization', 'task'),)", 'object_name': 'OrganizationOnboardingTask' }, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_completed': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'status': 
('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'task': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True' } ) }, 'sentry.organizationoption': { 'Meta': { 'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}) }, 'sentry.processingissue': { 'Meta': { 'unique_together': "(('project', 'checksum', 'type'),)", 'object_name': 'ProcessingIssue' }, 'checksum': ('django.db.models.fields.CharField', [], { 'max_length': '40', 'db_index': 'True' }), 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'type': ('django.db.models.fields.CharField', [], { 'max_length': '30' }) }, 'sentry.project': { 'Meta': { 'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'first_event': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'flags': ('django.db.models.fields.BigIntegerField', [], { 'default': '0', 'null': 'True' }), 'forced_color': ( 'django.db.models.fields.CharField', [], { 'max_length': '6', 'null': 'True', 'blank': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '200' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'public': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'slug': ('django.db.models.fields.SlugField', [], { 'max_length': '50', 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'team': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Team']" } ) }, 'sentry.projectbookmark': { 'Meta': { 'unique_together': "(('project_id', 'user'),)", 'object_name': 'ProjectBookmark' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedBigIntegerField', [], { 'null': 'True', 'blank': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.projectdsymfile': { 'Meta': { 'unique_together': "(('project', 'uuid'),)", 'object_name': 'ProjectDSymFile' }, 'cpu_name': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': 
"orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'object_name': ('django.db.models.fields.TextField', [], {}), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'uuid': ('django.db.models.fields.CharField', [], { 'max_length': '36' }) }, 'sentry.projectkey': { 'Meta': { 'object_name': 'ProjectKey' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'label': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'key_set'", 'to': "orm['sentry.Project']" } ), 'public_key': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'unique': 'True', 'null': 'True' } ), 'roles': ('django.db.models.fields.BigIntegerField', [], { 'default': '1' }), 'secret_key': ( 'django.db.models.fields.CharField', [], { 'max_length': '32', 'unique': 'True', 'null': 'True' } ), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.projectoption': { 'Meta': { 'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}) }, 'sentry.projectplatform': { 'Meta': { 'unique_together': "(('project_id', 'platform'),)", 'object_name': 'ProjectPlatform' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'platform': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project_id': ('sentry.db.models.fields.bounded.BoundedBigIntegerField', [], {}) }, 'sentry.rawevent': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'RawEvent' }, 'data': ('sentry.db.models.fields.node.NodeField', [], { 'null': 'True', 'blank': 'True' }), 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.release': { 'Meta': { 'unique_together': "(('organization', 'version'),)", 'object_name': 'Release' }, 'data': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'date_released': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True' }), 'date_started': ('django.db.models.fields.DateTimeField', [], { 'null': 'True', 'blank': 'True' }), 'id': 
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'owner': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True' } ), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'projects': ( 'django.db.models.fields.related.ManyToManyField', [], { 'related_name': "'releases'", 'symmetrical': 'False', 'through': "orm['sentry.ReleaseProject']", 'to': "orm['sentry.Project']" } ), 'ref': ( 'django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True', 'blank': 'True' } ), 'url': ( 'django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True', 'blank': 'True' } ), 'version': ('django.db.models.fields.CharField', [], { 'max_length': '64' }) }, 'sentry.releasecommit': { 'Meta': { 'unique_together': "(('release', 'commit'), ('release', 'order'))", 'object_name': 'ReleaseCommit' }, 'commit': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Commit']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'order': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True', 'db_index': 'True' } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.releaseenvironment': { 'Meta': { 'unique_together': "(('project_id', 'release_id', 'environment_id'),)", 'object_name': 'ReleaseEnvironment', 'db_table': "'sentry_environmentrelease'" }, 'environment_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'first_seen': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'db_index': 'True' } ), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'project_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True', 'db_index': 'True' } ), 'release_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ) }, 'sentry.releasefile': { 'Meta': { 'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile' }, 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']" } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ('django.db.models.fields.CharField', [], { 'max_length': '40' }), 'name': ('django.db.models.fields.TextField', [], {}), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'project_id': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'null': 'True' }), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': 
"orm['sentry.Release']" } ) }, 'sentry.releaseproject': { 'Meta': { 'unique_together': "(('project', 'release'),)", 'object_name': 'ReleaseProject', 'db_table': "'sentry_release_project'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'new_groups': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'null': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'release': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Release']" } ) }, 'sentry.repository': { 'Meta': { 'unique_together': "(('organization_id', 'name'), ('organization_id', 'provider', 'external_id'))", 'object_name': 'Repository' }, 'config': ('sentry.db.models.fields.jsonfield.JSONField', [], { 'default': '{}' }), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'external_id': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '200' }), 'organization_id': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'db_index': 'True' } ), 'provider': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ), 'url': ('django.db.models.fields.URLField', [], { 'max_length': '200', 'null': 'True' }) }, 'sentry.reprocessingreport': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'ReprocessingReport' }, 'datetime': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32', 'null': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) }, 'sentry.rule': { 'Meta': { 'object_name': 'Rule' }, 'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'label': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'status': ( 'sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0', 'db_index': 'True' } ) }, 'sentry.savedsearch': { 'Meta': { 'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch' }, 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_default': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'query': ('django.db.models.fields.TextField', [], {}) }, 'sentry.savedsearchuserdefault': { 'Meta': { 'unique_together': "(('project', 'user'),)", 
'object_name': 'SavedSearchUserDefault', 'db_table': "'sentry_savedsearch_userdefault'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'savedsearch': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.SavedSearch']" } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ) }, 'sentry.tagkey': { 'Meta': { 'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'" }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'label': ('django.db.models.fields.CharField', [], { 'max_length': '64', 'null': 'True' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.tagvalue': { 'Meta': { 'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'" }, 'data': ( 'sentry.db.models.fields.gzippeddict.GzippedDictField', [], { 'null': 'True', 'blank': 'True' } ), 'first_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'last_seen': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True' } ), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }), 'value': ('django.db.models.fields.CharField', [], { 'max_length': '200' }) }, 'sentry.team': { 'Meta': { 'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team' }, 'date_added': ( 'django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now', 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'organization': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Organization']" } ), 'slug': ('django.db.models.fields.SlugField', [], { 'max_length': '50' }), 'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], { 'default': '0' }) }, 'sentry.user': { 'Meta': { 'object_name': 'User', 'db_table': "'auth_user'" }, 'date_joined': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75', 'blank': 'True' }), 'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], { 'primary_key': 'True' }), 'is_active': ('django.db.models.fields.BooleanField', [], { 'default': 'True' }), 'is_managed': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'is_password_expired': ('django.db.models.fields.BooleanField', [], { 'default': 'False' 
}), 'is_staff': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'is_superuser': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'last_login': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'last_password_change': ('django.db.models.fields.DateTimeField', [], { 'null': 'True' }), 'name': ( 'django.db.models.fields.CharField', [], { 'max_length': '200', 'db_column': "'first_name'", 'blank': 'True' } ), 'password': ('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'session_nonce': ('django.db.models.fields.CharField', [], { 'max_length': '12', 'null': 'True' }), 'username': ('django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '128' }) }, 'sentry.useravatar': { 'Meta': { 'object_name': 'UserAvatar' }, 'avatar_type': ('django.db.models.fields.PositiveSmallIntegerField', [], { 'default': '0' }), 'file': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.File']", 'unique': 'True', 'null': 'True', 'on_delete': 'models.SET_NULL' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'ident': ( 'django.db.models.fields.CharField', [], { 'unique': 'True', 'max_length': '32', 'db_index': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'avatar'", 'unique': 'True', 'to': "orm['sentry.User']" } ) }, 'sentry.useremail': { 'Meta': { 'unique_together': "(('user', 'email'),)", 'object_name': 'UserEmail' }, 'date_hash_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'is_verified': ('django.db.models.fields.BooleanField', [], { 'default': 'False' }), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'related_name': "'emails'", 'to': "orm['sentry.User']" } ), 'validation_hash': ( 'django.db.models.fields.CharField', [], { 'default': "u'xSM70zG7MyRUVIUcaNBY2CyvizXoGfhQ'", 'max_length': '32' } ) }, 'sentry.useroption': { 'Meta': { 'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption' }, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'key': ('django.db.models.fields.CharField', [], { 'max_length': '64' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']", 'null': 'True' } ), 'user': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.User']" } ), 'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {}) }, 'sentry.userreport': { 'Meta': { 'unique_together': "(('project', 'event_id'),)", 'object_name': 'UserReport', 'index_together': "(('project', 'event_id'), ('project', 'date_added'))" }, 'comments': ('django.db.models.fields.TextField', [], {}), 'date_added': ('django.db.models.fields.DateTimeField', [], { 'default': 'datetime.datetime.now' }), 'email': ('django.db.models.fields.EmailField', [], { 'max_length': '75' }), 'event_id': ('django.db.models.fields.CharField', [], { 'max_length': '32' }), 'group': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Group']", 'null': 'True' } ), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], { 'primary_key': 'True' }), 'name': 
('django.db.models.fields.CharField', [], { 'max_length': '128' }), 'project': ( 'sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], { 'to': "orm['sentry.Project']" } ) } } complete_apps = ['sentry'] symmetrical = True
{ "content_hash": "b3fa46eedb5729a93e420fd1558c6ce4", "timestamp": "", "source": "github", "line_count": 2757, "max_line_length": 97, "avg_line_length": 36.95538628944505, "alnum_prop": 0.39922069764246315, "repo_name": "mvaled/sentry", "id": "8c48d7c2006499ad831bacd38c6b94705533f308", "size": "101910", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/sentry/south_migrations/0302_merge_environments.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "226439" }, { "name": "Dockerfile", "bytes": "6431" }, { "name": "HTML", "bytes": "173429" }, { "name": "JavaScript", "bytes": "9314175" }, { "name": "Lua", "bytes": "65885" }, { "name": "Makefile", "bytes": "9225" }, { "name": "Python", "bytes": "50385401" }, { "name": "Ruby", "bytes": "168" }, { "name": "Shell", "bytes": "5685" }, { "name": "TypeScript", "bytes": "773664" } ], "symlink_target": "" }
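The frozen "models" dict above is South's ORM snapshot: each entry maps a model label to (field class path, positional args, keyword args) triples that South re-materializes at migration time. As an illustration only, the 'sentry.savedsearch' entry corresponds roughly to the hand-written Django model below; this sketch is not code from the Sentry repository, and the plain ForeignKey stands in for the FlexibleForeignKey used upstream.

import datetime

from django.db import models


class SavedSearch(models.Model):
    # FlexibleForeignKey upstream; plain ForeignKey used for the sketch.
    project = models.ForeignKey('sentry.Project')
    name = models.CharField(max_length=128)
    query = models.TextField()
    date_added = models.DateTimeField(default=datetime.datetime.now)
    is_default = models.BooleanField(default=False)

    class Meta:
        unique_together = (('project', 'name'),)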
from __future__ import division
from __future__ import unicode_literals

import numpy as np

from rdkit import Chem

from deepchem.feat import Featurizer
from deepchem.feat.mol_graphs import ConvMol, WeaveMol


def one_of_k_encoding(x, allowable_set):
  if x not in allowable_set:
    raise ValueError("input {0} not in allowable set {1}".format(
        x, allowable_set))
  return list(map(lambda s: x == s, allowable_set))


def one_of_k_encoding_unk(x, allowable_set):
  """Maps inputs not in the allowable set to the last element."""
  if x not in allowable_set:
    x = allowable_set[-1]
  return list(map(lambda s: x == s, allowable_set))


def get_intervals(l):
  """For a list of lists, gets the cumulative products of the lengths."""
  intervals = len(l) * [0]
  # Initialize with 1
  intervals[0] = 1
  for k in range(1, len(l)):
    intervals[k] = (len(l[k]) + 1) * intervals[k - 1]

  return intervals


def safe_index(l, e):
  """Gets the index of e in l, providing an index of len(l) if not found."""
  try:
    return l.index(e)
  except ValueError:
    return len(l)


possible_atom_list = [
    'C', 'N', 'O', 'S', 'F', 'P', 'Cl', 'Mg', 'Na', 'Br', 'Fe', 'Ca', 'Cu',
    'Mc', 'Pd', 'Pb', 'K', 'I', 'Al', 'Ni', 'Mn'
]
possible_numH_list = [0, 1, 2, 3, 4]
possible_valence_list = [0, 1, 2, 3, 4, 5, 6]
possible_formal_charge_list = [-3, -2, -1, 0, 1, 2, 3]
possible_hybridization_list = [
    Chem.rdchem.HybridizationType.SP, Chem.rdchem.HybridizationType.SP2,
    Chem.rdchem.HybridizationType.SP3, Chem.rdchem.HybridizationType.SP3D,
    Chem.rdchem.HybridizationType.SP3D2
]
possible_number_radical_e_list = [0, 1, 2]
possible_chirality_list = ['R', 'S']

reference_lists = [
    possible_atom_list, possible_numH_list, possible_valence_list,
    possible_formal_charge_list, possible_number_radical_e_list,
    possible_hybridization_list, possible_chirality_list
]

intervals = get_intervals(reference_lists)


def get_feature_list(atom):
  features = 6 * [0]
  features[0] = safe_index(possible_atom_list, atom.GetSymbol())
  features[1] = safe_index(possible_numH_list, atom.GetTotalNumHs())
  features[2] = safe_index(possible_valence_list, atom.GetImplicitValence())
  features[3] = safe_index(possible_formal_charge_list, atom.GetFormalCharge())
  features[4] = safe_index(possible_number_radical_e_list,
                           atom.GetNumRadicalElectrons())
  features[5] = safe_index(possible_hybridization_list, atom.GetHybridization())
  return features


def features_to_id(features, intervals):
  """Convert list of features into index using spacings provided in intervals"""
  id = 0
  for k in range(len(intervals)):
    id += features[k] * intervals[k]

  # Shift by one so that index 0 can represent the null molecule.
  id = id + 1
  return id


def id_to_features(id, intervals):
  features = 6 * [0]

  # Correct for null
  id -= 1

  for k in range(0, 6 - 1):
    features[6 - k - 1] = id // intervals[6 - k - 1]
    id -= features[6 - k - 1] * intervals[6 - k - 1]
  # Correct for last one
  features[0] = id

  return features


def atom_to_id(atom):
  """Return a unique id corresponding to the atom type"""
  features = get_feature_list(atom)
  return features_to_id(features, intervals)


def atom_features(atom,
                  bool_id_feat=False,
                  explicit_H=False,
                  use_chirality=False):
  if bool_id_feat:
    return np.array([atom_to_id(atom)])
  else:
    results = one_of_k_encoding_unk(
      atom.GetSymbol(),
      [
        'C', 'N', 'O', 'S', 'F', 'Si', 'P', 'Cl', 'Br', 'Mg', 'Na', 'Ca',
        'Fe', 'As', 'Al', 'I', 'B', 'V', 'K', 'Tl', 'Yb', 'Sb', 'Sn', 'Ag',
        'Pd', 'Co', 'Se', 'Ti', 'Zn', 'H',  # H?
'Li', 'Ge', 'Cu', 'Au', 'Ni', 'Cd', 'In', 'Mn', 'Zr', 'Cr', 'Pt', 'Hg', 'Pb', 'Unknown' ]) + one_of_k_encoding(atom.GetDegree(), [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + \ one_of_k_encoding_unk(atom.GetImplicitValence(), [0, 1, 2, 3, 4, 5, 6]) + \ [atom.GetFormalCharge(), atom.GetNumRadicalElectrons()] + \ one_of_k_encoding_unk(atom.GetHybridization(), [ Chem.rdchem.HybridizationType.SP, Chem.rdchem.HybridizationType.SP2, Chem.rdchem.HybridizationType.SP3, Chem.rdchem.HybridizationType. SP3D, Chem.rdchem.HybridizationType.SP3D2 ]) + [atom.GetIsAromatic()] # In case of explicit hydrogen(QM8, QM9), avoid calling `GetTotalNumHs` if not explicit_H: results = results + one_of_k_encoding_unk(atom.GetTotalNumHs(), [0, 1, 2, 3, 4]) if use_chirality: try: results = results + one_of_k_encoding_unk( atom.GetProp('_CIPCode'), ['R', 'S']) + [atom.HasProp('_ChiralityPossible')] except: results = results + [False, False ] + [atom.HasProp('_ChiralityPossible')] return np.array(results) def bond_features(bond, use_chirality=False): bt = bond.GetBondType() bond_feats = [ bt == Chem.rdchem.BondType.SINGLE, bt == Chem.rdchem.BondType.DOUBLE, bt == Chem.rdchem.BondType.TRIPLE, bt == Chem.rdchem.BondType.AROMATIC, bond.GetIsConjugated(), bond.IsInRing() ] if use_chirality: bond_feats = bond_feats + one_of_k_encoding_unk( str(bond.GetStereo()), ["STEREONONE", "STEREOANY", "STEREOZ", "STEREOE"]) return np.array(bond_feats) def pair_features(mol, edge_list, canon_adj_list, bt_len=6, graph_distance=True): if graph_distance: max_distance = 7 else: max_distance = 1 N = mol.GetNumAtoms() features = np.zeros((N, N, bt_len + max_distance + 1)) num_atoms = mol.GetNumAtoms() rings = mol.GetRingInfo().AtomRings() for a1 in range(num_atoms): for a2 in canon_adj_list[a1]: # first `bt_len` features are bond features(if applicable) features[a1, a2, :bt_len] = np.asarray( edge_list[tuple(sorted((a1, a2)))], dtype=float) for ring in rings: if a1 in ring: # `bt_len`-th feature is if the pair of atoms are in the same ring features[a1, ring, bt_len] = 1 features[a1, a1, bt_len] = 0. # graph distance between two atoms if graph_distance: distance = find_distance( a1, num_atoms, canon_adj_list, max_distance=max_distance) features[a1, :, bt_len + 1:] = distance # Euclidean distance between atoms if not graph_distance: coords = np.zeros((N, 3)) for atom in range(N): pos = mol.GetConformer(0).GetAtomPosition(atom) coords[atom, :] = [pos.x, pos.y, pos.z] features[:, :, -1] = np.sqrt(np.sum(np.square( np.stack([coords] * N, axis=1) - \ np.stack([coords] * N, axis=0)), axis=2)) return features def find_distance(a1, num_atoms, canon_adj_list, max_distance=7): distance = np.zeros((num_atoms, max_distance)) radial = 0 # atoms `radial` bonds away from `a1` adj_list = set(canon_adj_list[a1]) # atoms less than `radial` bonds away all_list = set([a1]) while radial < max_distance: distance[list(adj_list), radial] = 1 all_list.update(adj_list) # find atoms `radial`+1 bonds away next_adj = set() for adj in adj_list: next_adj.update(canon_adj_list[adj]) adj_list = next_adj - all_list radial = radial + 1 return distance class ConvMolFeaturizer(Featurizer): name = ['conv_mol'] def __init__(self, master_atom=False, use_chirality=False, atom_properties=[]): """ Parameters ---------- master_atom: Boolean if true create a fake atom with bonds to every other atom. the initialization is the mean of the other atom features in the molecule. 
This technique is briefly discussed in Neural Message Passing for Quantum Chemistry https://arxiv.org/pdf/1704.01212.pdf use_chirality: Boolean if true then make the resulting atom features aware of the chirality of the molecules in question atom_properties: list of string or None properties in the RDKit Mol object to use as additional atom-level features in the larger molecular feature. If None, then no atom-level properties are used. Properties should be in the RDKit mol object should be in the form atom XXXXXXXX NAME where XXXXXXXX is a zero-padded 8 digit number coresponding to the zero-indexed atom index of each atom and NAME is the name of the property provided in atom_properties. So "atom 00000000 sasa" would be the name of the molecule level property in mol where the solvent accessible surface area of atom 0 would be stored. Since ConvMol is an object and not a numpy array, need to set dtype to object. """ self.dtype = object self.master_atom = master_atom self.use_chirality = use_chirality self.atom_properties = list(atom_properties) def _get_atom_properties(self, atom): """ For a given input RDKit atom return the values of the properties requested when initializing the featurize. See the __init__ of the class for a full description of the names of the properties Parameters ---------- atom: RDKit.rdchem.Atom Atom to get the properties of returns a numpy lists of floats of the same size as self.atom_properties """ values = [] for prop in self.atom_properties: mol_prop_name = str("atom %08d %s" % (atom.GetIdx(), prop)) try: values.append(float(atom.GetOwningMol().GetProp(mol_prop_name))) except KeyError: raise KeyError("No property %s found in %s in %s" % (mol_prop_name, atom.GetOwningMol(), self)) return np.array(values) def _featurize(self, mol): """Encodes mol as a ConvMol object.""" # Get the node features idx_nodes = [(a.GetIdx(), np.concatenate((atom_features( a, use_chirality=self.use_chirality), self._get_atom_properties(a)))) for a in mol.GetAtoms()] idx_nodes.sort() # Sort by ind to ensure same order as rd_kit idx, nodes = list(zip(*idx_nodes)) # Stack nodes into an array nodes = np.vstack(nodes) if self.master_atom: master_atom_features = np.expand_dims(np.mean(nodes, axis=0), axis=0) nodes = np.concatenate([nodes, master_atom_features], axis=0) # Get bond lists with reverse edges included edge_list = [ (b.GetBeginAtomIdx(), b.GetEndAtomIdx()) for b in mol.GetBonds() ] # Get canonical adjacency list canon_adj_list = [[] for mol_id in range(len(nodes))] for edge in edge_list: canon_adj_list[edge[0]].append(edge[1]) canon_adj_list[edge[1]].append(edge[0]) if self.master_atom: fake_atom_index = len(nodes) - 1 for index in range(len(nodes) - 1): canon_adj_list[index].append(fake_atom_index) return ConvMol(nodes, canon_adj_list) def feature_length(self): return 75 + len(self.atom_properties) class WeaveFeaturizer(Featurizer): name = ['weave_mol'] def __init__(self, graph_distance=True, explicit_H=False, use_chirality=False): # Distance is either graph distance(True) or Euclidean distance(False, # only support datasets providing Cartesian coordinates) self.graph_distance = graph_distance # Set dtype self.dtype = object # If includes explicit hydrogens self.explicit_H = explicit_H # If uses use_chirality self.use_chirality = use_chirality def _featurize(self, mol): """Encodes mol as a WeaveMol object.""" # Atom features idx_nodes = [(a.GetIdx(), atom_features( a, explicit_H=self.explicit_H, use_chirality=self.use_chirality)) for a in mol.GetAtoms()] idx_nodes.sort() # Sort by 
ind to ensure same order as rd_kit idx, nodes = list(zip(*idx_nodes)) # Stack nodes into an array nodes = np.vstack(nodes) # Get bond lists edge_list = {} for b in mol.GetBonds(): edge_list[tuple(sorted([b.GetBeginAtomIdx(), b.GetEndAtomIdx()]))] = bond_features( b, use_chirality=self.use_chirality) # Get canonical adjacency list canon_adj_list = [[] for mol_id in range(len(nodes))] for edge in edge_list.keys(): canon_adj_list[edge[0]].append(edge[1]) canon_adj_list[edge[1]].append(edge[0]) # Calculate pair features pairs = pair_features( mol, edge_list, canon_adj_list, bt_len=6, graph_distance=self.graph_distance) return WeaveMol(nodes, pairs)
{ "content_hash": "7ff251688723a32931b1de46cdba5e29", "timestamp": "", "source": "github", "line_count": 413, "max_line_length": 89, "avg_line_length": 31.818401937046005, "alnum_prop": 0.600867513887832, "repo_name": "Agent007/deepchem", "id": "06510f6c556ab1684777179b3b67526c1eed5271", "size": "13141", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "deepchem/feat/graph_features.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16453" }, { "name": "HTML", "bytes": "20618" }, { "name": "Jupyter Notebook", "bytes": "59756" }, { "name": "Python", "bytes": "2129306" }, { "name": "Shell", "bytes": "11976" } ], "symlink_target": "" }
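A minimal usage sketch for the featurizers above; the SMILES string and variable names are arbitrary, and only atom_features and ConvMolFeaturizer come from this module.

from rdkit import Chem

mol = Chem.MolFromSmiles('CCO')  # ethanol; any small molecule works

# 75-element per-atom vector: one-hot blocks for symbol, degree, valence,
# charge, hybridization, aromaticity and hydrogen count.
vec = atom_features(mol.GetAtomWithIdx(0))
print(len(vec))  # 75

# Whole-molecule graph object consumed by graph-convolution models.
conv_mol = ConvMolFeaturizer()._featurize(mol)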
""" test_product Tests Product """ import sys import os from decimal import Decimal DIR = os.path.abspath(os.path.normpath( os.path.join( __file__, '..', '..', '..', '..', '..', 'trytond' ) )) if os.path.isdir(DIR): sys.path.insert(0, os.path.dirname(DIR)) import unittest import trytond.tests.test_tryton from trytond.tests.test_tryton import POOL, USER, DB_NAME, CONTEXT from trytond.transaction import Transaction from test_base import TestBase, load_json class TestProduct(TestBase): ''' Tests Product ''' def test_0010_code_fields(self): """Tests the function fields for codes """ Template = POOL.get('product.template') with Transaction().start(DB_NAME, USER, CONTEXT): self.setup_defaults() template, = Template.create([{ 'name': 'Test Product', 'default_uom': self.uom.id, 'account_expense': self.get_account_by_kind('expense'), 'account_revenue': self.get_account_by_kind('revenue'), 'export_to_amazon': True, 'products': [('create', [{ 'code': 'code1', 'list_price': Decimal('10.0'), 'cost_price': Decimal('8.0'), 'description': 'Some product description', 'codes': [('create', [{ 'code': 'BUYGBS6866', 'code_type': 'asin', }, { 'code': '123456789012', 'code_type': 'upc', }, { 'code': '1234567890123', 'code_type': 'ean', }])] }])] }]) product, = template.products self.assertEqual(product.ean.code, '1234567890123') self.assertEqual(product.upc.code, '123456789012') self.assertEqual(product.asin.code, 'BUYGBS6866') def test_0020_create_product_using_amazon_data(self): """ Tests if product is created using amazon data """ Product = POOL.get('product.product') with Transaction().start(DB_NAME, USER, CONTEXT): self.setup_defaults() with Transaction().set_context( {'current_channel': self.sale_channel.id} ): self.assertEqual(Product.search([], count=True), 0) product_data = load_json('products', 'product-2') Product.create_using_amazon_data(product_data) self.assertEqual(Product.search([], count=True), 1) def suite(): """ Test Suite """ test_suite = trytond.tests.test_tryton.suite() test_suite.addTests( unittest.TestLoader().loadTestsFromTestCase(TestProduct) ) return test_suite if __name__ == '__main__': unittest.TextTestRunner(verbosity=2).run(suite())
{ "content_hash": "423f55a6f71a4f55d472c4f07b7de657", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 71, "avg_line_length": 29.81, "alnum_prop": 0.5182824555518283, "repo_name": "priyankarani/trytond-amazon-mws", "id": "7e080fd15d89c97698f8f768d5622d202904966c", "size": "3005", "binary": false, "copies": "3", "ref": "refs/heads/develop", "path": "tests/test_product.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "276" }, { "name": "Python", "bytes": "87162" } ], "symlink_target": "" }
from msrest.service_client import ServiceClient from msrest import Configuration, Serializer, Deserializer from .version import VERSION from .operations.paths import Paths from . import models class AutoRestParameterizedHostTestClientConfiguration(Configuration): """Configuration for AutoRestParameterizedHostTestClient Note that all parameters used to create this instance are saved as instance attributes. :param host: A string value that is used as a global part of the parameterized host :type host: str :param str filepath: Existing config """ def __init__( self, host, filepath=None): if host is None: raise ValueError("Parameter 'host' must not be None.") if not isinstance(host, str): raise TypeError("Parameter 'host' must be str.") base_url = 'http://{accountName}{host}' super(AutoRestParameterizedHostTestClientConfiguration, self).__init__(base_url, filepath) self.add_user_agent('autorestparameterizedhosttestclient/{}'.format(VERSION)) self.host = host class AutoRestParameterizedHostTestClient(object): """Test Infrastructure for AutoRest :ivar config: Configuration for client. :vartype config: AutoRestParameterizedHostTestClientConfiguration :ivar paths: Paths operations :vartype paths: .operations.Paths :param host: A string value that is used as a global part of the parameterized host :type host: str :param str filepath: Existing config """ def __init__( self, host, filepath=None): self.config = AutoRestParameterizedHostTestClientConfiguration(host, filepath) self._client = ServiceClient(None, self.config) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self.paths = Paths( self._client, self.config, self._serialize, self._deserialize)
{ "content_hash": "3fdd89493373080226208200c92383df", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 98, "avg_line_length": 33.57377049180328, "alnum_prop": 0.69580078125, "repo_name": "xingwu1/autorest", "id": "14b292dfa6387815734b656fbef930e2f1637eba", "size": "2522", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/CustomBaseUri/autorestparameterizedhosttestclient/auto_rest_parameterized_host_test_client.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "12942" }, { "name": "C#", "bytes": "11523351" }, { "name": "CSS", "bytes": "110" }, { "name": "HTML", "bytes": "274" }, { "name": "Java", "bytes": "4720447" }, { "name": "JavaScript", "bytes": "4712361" }, { "name": "PowerShell", "bytes": "29614" }, { "name": "Python", "bytes": "2275107" }, { "name": "Ruby", "bytes": "246219" }, { "name": "Shell", "bytes": "423" }, { "name": "TypeScript", "bytes": "179577" } ], "symlink_target": "" }
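Hypothetical usage of the generated client above. With host='host:3000', the account name fills the {accountName} slot of the parameterized base URL, so 'local' resolves to http://localhost:3000; the get_empty operation name and the /customuri path are taken from AutoRest's custom-base-uri acceptance test and are assumptions here.

client = AutoRestParameterizedHostTestClient('host:3000')
client.paths.get_empty('local')  # GET http://localhost:3000/customuri (assumed path)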
import discord

import src.CommandProcessor as CP
import src.permission as perm
import src.Utilities as utilis
from config.auth import User
import src.Database as database


class Bot:
    # Wraps the Discord client: __init__ stores the login token,
    # start() wires up the event handlers and logs in.
    def __init__(self):
        # Placeholders for future attributes (command directory, prefix).
        self.Token = User.Token

    def start(self):
        client = discord.Client()

        @client.event
        async def on_ready():
            print('\nLogged in successfully')
            print('User Name: ', client.user.name)
            print('User ID: ', client.user.id)
            print('Connected Servers:')
            for server in client.servers:
                print(server)
            print('------------------')
            await utilis.morning_run(client)
            CP.begin(client)
            database.connect()

        @client.event
        async def on_server_join(server):
            await utilis.first_run(client, server)
            database.new_server(server)

        @client.event
        async def on_server_role_create(role):
            await utilis.new_role(role)

        @client.event
        async def on_server_role_delete(role):
            await utilis.role_removed(role)

        @client.event
        async def on_server_role_update(before, after):
            await utilis.role_change(before, after)

        client.run(self.Token)


if __name__ == '__main__':
    Bot().start()
{ "content_hash": "39e77a1ff0710660eed98d6432d8f9f0", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 55, "avg_line_length": 28.313725490196077, "alnum_prop": 0.574792243767313, "repo_name": "KvasirSGDevelopment/Aurora", "id": "783560746e9fe380f3c15b01bc740c8b0f1bc426", "size": "1444", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "src/login.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "39362" } ], "symlink_target": "" }
import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.append(os.path.abspath('.')) # make sure we are documenting the local version with autodoc sys.path.insert(0, os.path.abspath('..')) import django_assets # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.extlinks'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'django-assets' copyright = u'2012, Michael Elsdörfer' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = ".".join(map(str, django_assets.__version__)) # The full version, including alpha/beta/rc tags. release = version # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinxdoc' html_style = 'theme_customize.css' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. 
#html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'django-assetsdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'django-assets.tex', u'django-assets Documentation', u'Michael Elsdörfer', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'django-assets', u'django-assets Documentation', [u'Michael Elsdörfer'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'django-assets', u'django-assets Documentation', u'Michael Elsdörfer', 'django-assets', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' WEBASSETS_DOC_URL = 'http://elsdoerfer.name/docs/webassets/' intersphinx_mapping = { 'python': ('http://docs.python.org/', None), 'webassets': (WEBASSETS_DOC_URL, None), } extlinks = {'webassets': (WEBASSETS_DOC_URL+'%s.html', None)} def setup(app): from sphinx.ext import autodoc class MyDataDocumenter(autodoc.DataDocumenter): # To fetch the docstrings for the settings, Sphinx needs some help. # Without this, it would insert ugly signatures like: # my_module.SETTING = <property object at 0x193d368> priority = 20 def add_directive_header(self, sig): autodoc.ModuleLevelDocumenter.add_directive_header(self, sig) app.add_autodocumenter(MyDataDocumenter)
{ "content_hash": "d6c81dfd87f45edb591dda7feeef7e9d", "timestamp": "", "source": "github", "line_count": 259, "max_line_length": 80, "avg_line_length": 32.43629343629344, "alnum_prop": 0.7013450779669087, "repo_name": "mcfletch/django-assets", "id": "d0bf13d3ae10808ed6cfa6feb4dc6997eb0e5124", "size": "8829", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "docs/conf.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Python", "bytes": "54569" } ], "symlink_target": "" }
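For reference, the extlinks role defined above substitutes its target into the '%s' slot of the URL template, so in the project's .rst sources :webassets:`environment` links to the page computed below.

print((WEBASSETS_DOC_URL + '%s.html') % 'environment')
# http://elsdoerfer.name/docs/webassets/environment.html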
import datetime from cStringIO import StringIO from csvkit import CSVKitReader, CSVKitWriter from csvkit import sniffer from csvkit import typeinference from csvkit.cli import parse_column_identifiers class InvalidType(object): """ Dummy object type for Column initialization, since None is being used as a valid value. """ pass class Column(list): """ A normalized data column and inferred annotations (nullable, etc.). """ def __init__(self, order, name, l, normal_type=InvalidType, blanks_as_nulls=True): """ Construct a column from a sequence of values. If normal_type is not InvalidType, inference will be skipped and values assumed to have already been normalized. """ if normal_type != InvalidType: t = normal_type data = l else: t, data = typeinference.normalize_column_type(l, blanks_as_nulls=blanks_as_nulls) list.__init__(self, data) self.order = order self.name = name or '_unnamed' # empty column names don't make sense self.type = t def __str__(self): return str(self.__unicode__()) def __unicode__(self): """ Stringify a description of this column. """ return u'%3i: %s (%s)' % (self.order, self.name, self.type) def __getitem__(self, key): """ Return null for keys beyond the range of the column. This allows for columns to be of uneven length and still be merged into rows cleanly. """ if key >= len(self): return None return list.__getitem__(self, key) def has_nulls(self): """ Check if this column contains nulls. """ return True if None in self else False def max_length(self): """ Compute maximum length of data in this column. Returns 0 if the column does not of type ``unicode``. """ l = 0 if self.type == unicode: l = max([len(d) if d else 0 for d in self]) if self.has_nulls(): l = max(l, 4) # "None" return l class Table(list): """ A normalized data table and inferred annotations (nullable, etc.). """ def __init__(self, columns=[], name='new_table'): """ Generic constructor. You should normally use a from_* method to create a Table. """ list.__init__(self, columns) self.name = name def __str__(self): return str(self.__unicode__()) def __unicode__(self): """ Stringify a description of all columns in this table. """ return u'\n'.join([unicode(c) for c in self]) def _reindex_columns(self): """ Update order properties of all columns in table. """ for i, c in enumerate(self): c.order = i def _deduplicate_column_name(self, column): while column.name in self.headers(): try: i = column.name.rindex('_') counter = int(column.name[i + 1:]) column.name = '%s_%i' % (column.name[:i], counter + 1) except: column.name += '_2' return column.name def append(self, column): """Implements list append.""" self._deduplicate_column_name(column) list.append(self, column) column.index = len(self) - 1 def insert(self, i, column): """Implements list insert.""" self._deduplicate_column_name(column) list.insert(self, i, column) self._reindex_columns() def extend(self, columns): """Implements list extend.""" for c in columns: self._deduplicate_column_name(c) list.extend(self, columns) self._reindex_columns() def remove(self, column): """Implements list remove.""" list.remove(self, column) self._reindex_columns() def sort(self): """Forbids list sort.""" raise NotImplementedError() def reverse(self): """Forbids list reverse.""" raise NotImplementedError() def headers(self): return [c.name for c in self] def count_rows(self): lengths = [len(c) for c in self] if lengths: return max(lengths) return 0 def row(self, i): """ Fetch a row of data from this table. 
""" if i < 0: raise IndexError('Negative row numbers are not valid.') if i >= self.count_rows(): raise IndexError('Row number exceeds the number of rows in the table.') row_data = [c[i] for c in self] return row_data @classmethod def from_csv(cls, f, name='from_csv_table', snifflimit=None, column_ids=None, blanks_as_nulls=True, zero_based=False, **kwargs): """ Creates a new Table from a file-like object containing CSV data. Note: the column_ids argument will cause only those columns with a matching identifier to be parsed, type inferred, etc. However, their order/index property will reflect the original data (e.g. column 8 will still be "order" 7, even if it's the third column in the resulting Table. """ # This bit of nonsense is to deal with "files" from stdin, # which are not seekable and thus must be buffered contents = f.read() if snifflimit: sample = contents[:snifflimit] else: sample = contents dialect = sniffer.sniff_dialect(sample) f = StringIO(contents) reader = CSVKitReader(f, dialect=dialect, **kwargs) headers = reader.next() if column_ids: column_ids = parse_column_identifiers(column_ids, headers, zero_based) headers = [headers[c] for c in column_ids] else: column_ids = range(len(headers)) data_columns = [[] for c in headers] for row in reader: for i, d in enumerate(row): try: data_columns[i].append(row[column_ids[i]].strip()) except IndexError: # Non-rectangular data is truncated break columns = [] for i, c in enumerate(data_columns): columns.append(Column(column_ids[i], headers[i], c, blanks_as_nulls=blanks_as_nulls)) return Table(columns, name=name) def to_rows(self, serialize_dates=False): """ Generates rows from columns and performs. Optionally serialize date objects to isoformat strings. """ if serialize_dates: out_columns = [] for c in self: # Stringify datetimes, dates, and times if c.type in [datetime.datetime, datetime.date, datetime.time]: out_columns.append([unicode(v.isoformat()) if v != None else None for v in c]) else: out_columns.append(c) # Convert columns to rows return zip(*out_columns) else: return zip(*self) def to_csv(self, output, **kwargs): """ Serializes the table to CSV and writes it to any file-like object. """ rows = self.to_rows(serialize_dates=True) # Insert header row rows.insert(0, self.headers()) writer = CSVKitWriter(output, **kwargs) writer.writerows(rows)
{ "content_hash": "9e920749aa7898633fd6b8577fd56676", "timestamp": "", "source": "github", "line_count": 255, "max_line_length": 146, "avg_line_length": 29.584313725490198, "alnum_prop": 0.5597826086956522, "repo_name": "jsvine/csvkit", "id": "143b3169b25659932151782cb6dbb2498fc439f7", "size": "7567", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "csvkit/table.py", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
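A short round-trip sketch for the Table class above; the file contents and names are arbitrary, and StringIO stands in for a real CSV file object.

from cStringIO import StringIO

f = StringIO('name,count\nalpha,1\nbeta,2\n')
table = Table.from_csv(f, name='demo')

print(table.headers())  # [u'name', u'count']
print(table.row(0))     # [u'alpha', 1]; values are type-inferred

out = StringIO()
table.to_csv(out)       # serialize back; dates are written in ISO format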
from django import forms from teachers.models import * # from baseapp.models import * class Teacher_detailform(forms.ModelForm): class Meta: model = Teacher_detail exclude = ('count','transfer_flag','ofs_flag','ofs_reason','ofs_date','super_annum_flag') class Teacher_outofserviceform(forms.ModelForm): model=Teacher_detail class Teacher_detailform1(forms.ModelForm): class Meta: model = Teacher_detail exclude = ('transfer_flag',) class Teacher_leave_masterform(forms.ModelForm): class Meta: model = Teacher_leave_master exclude = ('teacherid',) class Teacher_posting_entryform(forms.ModelForm): class Meta: model = Teacher_posting_entry exclude = ('complete_flag','teacherid','staff_id','school') class educationform(forms.ModelForm): class Meta: model = Teacher_edu exclude = ('complete_flag','teacherid','staff_id') class Teacher_regularisation_entryform(forms.ModelForm): class Meta: model = Teacher_regularisation_entry exclude = ('complete_flag','teacherid','staff_id') class Teacher_probation_entryform(forms.ModelForm): class Meta: model = Teacher_probation_entry exclude = ('complete_flag','teacherid','staff_id') class Teacher_relinquisform(forms.ModelForm): class Meta: model = Teacher_relinquish_entry exclude = ('complete_flag','teacherid','staff_id') class Teacher_trainingform(forms.ModelForm): class Meta: model = Teacher_training exclude = ('complete_flag','teacherid','staff_id') class Teacher_testform(forms.ModelForm): class Meta: model = Teacher_test exclude = ('complete_flag','teacherid','staff_id') class Teacher_leaveform(forms.ModelForm): class Meta: model = Teacher_leave exclude = ('complete_flag','teacherid','staff_id') class Teacher_ltcform(forms.ModelForm): class Meta: model = Teacher_ltc exclude = ('complete_flag','teacherid','staff_id') class Teacher_GPF_loanform(forms.ModelForm): class Meta: model = Teacher_GPF_loan exclude = ('complete_flag','teacherid','staff_id') class Teacher_loanform(forms.ModelForm): class Meta: model = Teacher_loan exclude = ('complete_flag','teacherid','staff_id') class Teacher_family_detailform(forms.ModelForm): class Meta: model = Teacher_family_detail exclude = ('complete_flag','teacherid','staff_id') class Teacher_movable_propertyform(forms.ModelForm): class Meta: model = Teacher_movable_property exclude = ('complete_flag','teacherid','staff_id') class Teacher_immovalble_propertyform(forms.ModelForm): class Meta: model = Teacher_immovalble_property exclude = ('complete_flag','teacherid','staff_id') class Teacher_nominiform(forms.ModelForm): class Meta: model = Teacher_nomini exclude = ('complete_flag','teacherid','staff_id') class Teacher_actionform(forms.ModelForm): class Meta: model = Teacher_action exclude = ('cleared_flag','teacherid','staff_id') class Teacher_transfer_history_form(forms.ModelForm): class Meta: model=Teacher_transfer_history class Teacher_leave_creditform(forms.ModelForm): class Meta: model = Teacher_leave_credit exclude = ('complete_flag','teacherid') class Teacher_leave_surrenderform(forms.ModelForm): class Meta: model = Teacher_leave_surrender exclude = ('complete_flag','teacherid','staff_id') class private_teachers_detailform(forms.ModelForm): class Meta: model=private_teachers_detail exclude = ('pri_tea_id','school_name') class private_educationform(forms.ModelForm): class Meta: model=Teacher_edu_private exclude = ('private_tea_id','unique_id') class Teacher_result_exam_form(forms.ModelForm): class Meta: model = Teacher_result_exam exclude = ('teacherid',) class Teacher_award_form(forms.ModelForm): class Meta: model = 
Teacher_award
        exclude = ('teacherid',)


class Teacher_transfer_purpose_form(forms.ModelForm):

    class Meta:
        model = Teacher_transfer_purpose
{ "content_hash": "eaffefc9fa230ad0039c445a1ae27832", "timestamp": "", "source": "github", "line_count": 163, "max_line_length": 97, "avg_line_length": 27.11042944785276, "alnum_prop": 0.6605566870332654, "repo_name": "mravikumar281/staging-server", "id": "eb6eebca413e50c245d84af17ea254ee106334fb", "size": "4419", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "teachers/forms.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "805986" }, { "name": "HTML", "bytes": "3648803" }, { "name": "JavaScript", "bytes": "3804321" }, { "name": "Makefile", "bytes": "3152" }, { "name": "PHP", "bytes": "5016" }, { "name": "Python", "bytes": "2107084" }, { "name": "Shell", "bytes": "148" } ], "symlink_target": "" }
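These ModelForms are consumed in views in the usual Django pattern; the sketch below is hypothetical (the view name and template path are assumptions, and the form's fields come from the Teacher_detail model, which is not shown here).

from django.shortcuts import render

def teacher_detail_create(request):
    # Unbound on GET, bound and validated on POST.
    form = Teacher_detailform(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        form.save()
    return render(request, 'teachers/detail_form.html', {'form': form})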
import queue import unittest from contextlib import contextmanager, ExitStack from unittest.mock import patch import pytest from coalib.bearlib.abstractions.LinterClass import LinterClass from coalib.testing.BearTestHelper import generate_skip_decorator from coalib.bears.LocalBear import LocalBear from coala_utils.ContextManagers import prepare_file from coalib.settings.Section import Section from coalib.settings.Setting import Setting @contextmanager def execute_bear(bear, *args, **kwargs): try: console_output = [] # For linters provide additional information, such as # stdout and stderr. with ExitStack() as stack: if isinstance(bear, LinterClass): console_output.append('The program yielded ' 'the following output:\n') old_process_output = bear.process_output def new_process_output(output, filename=None, file=None, **process_output_kwargs): if isinstance(output, tuple): stdout, stderr = output console_output.append('Stdout:\n' + stdout) console_output.append('Stderr:\n' + stderr) else: console_output.append(output) return old_process_output(output, filename, file, **process_output_kwargs) stack.enter_context(patch.object( bear, 'process_output', wraps=new_process_output)) bear_output_generator = bear.execute(*args, **kwargs) assert bear_output_generator is not None, \ 'Bear returned None on execution\n' yield bear_output_generator except Exception as err: msg = [] while not bear.message_queue.empty(): msg.append(bear.message_queue.get().message) msg += console_output raise AssertionError(str(err) + ''.join('\n' + m for m in msg)) def get_results(local_bear, lines, filename=None, force_linebreaks=True, create_tempfile=True, tempfile_kwargs={}, settings={}): if local_bear.BEAR_DEPS: # Get results of bear's dependencies first deps_results = dict() for bear in local_bear.BEAR_DEPS: uut = bear(local_bear.section, queue.Queue()) deps_results[bear.name] = get_results(uut, lines, filename, force_linebreaks, create_tempfile, tempfile_kwargs, settings) else: deps_results = None with prepare_file(lines, filename, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs) as (file, fname), \ execute_bear(local_bear, fname, file, dependency_results=deps_results, **local_bear.get_metadata().filter_parameters(settings) ) as bear_output: return bear_output class LocalBearTestHelper(unittest.TestCase): """ This is a helper class for simplification of testing of local bears. Please note that all abstraction will prepare the lines so you don't need to do that if you use them. If you miss some methods, get in contact with us, we'll be happy to help! """ def check_validity(self, local_bear, lines, filename=None, valid=True, force_linebreaks=True, create_tempfile=True, tempfile_kwargs={}, settings={}): """ Asserts that a check of the given lines with the given local bear either yields or does not yield any results. :param local_bear: The local bear to check with. :param lines: The lines to check. (List of strings) :param filename: The filename, if it matters. :param valid: Whether the lines are valid or not. :param force_linebreaks: Whether to append newlines at each line if needed. (Bears expect a \\n for every line) :param create_tempfile: Whether to save lines in tempfile if needed. :param tempfile_kwargs: Kwargs passed to tempfile.mkstemp(). 
""" if valid: self.check_results(local_bear, lines, results=[], filename=filename, check_order=True, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs, settings=settings, ) else: return self.check_invalidity(local_bear, lines, filename=filename, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs, settings=settings, ) def check_invalidity(self, local_bear, lines, filename=None, force_linebreaks=True, create_tempfile=True, tempfile_kwargs={}, settings={}): """ Asserts that a check of the given lines with the given local bear yields results. :param local_bear: The local bear to check with. :param lines: The lines to check. (List of strings) :param filename: The filename, if it matters. :param force_linebreaks: Whether to append newlines at each line if needed. (Bears expect a \\n for every line) :param create_tempfile: Whether to save lines in tempfile if needed. :param tempfile_kwargs: Kwargs passed to tempfile.mkstemp(). """ assert isinstance(self, unittest.TestCase) self.assertIsInstance(local_bear, LocalBear, msg='The given bear is not a local bear.') self.assertIsInstance(lines, (list, tuple), msg='The given lines are not a list.') bear_output = get_results(local_bear, lines, filename=filename, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs, settings=settings, ) msg = ("The local bear '{}' yields no result although it " 'should.'.format(local_bear.__class__.__name__)) self.assertNotEqual(len(bear_output), 0, msg=msg) return bear_output def check_results(self, local_bear, lines, results, filename=None, check_order=False, force_linebreaks=True, create_tempfile=True, tempfile_kwargs={}, settings={}): """ Asserts that a check of the given lines with the given local bear does yield exactly the given results. :param local_bear: The local bear to check with. :param lines: The lines to check. (List of strings) :param results: The expected list of results. :param filename: The filename, if it matters. :param force_linebreaks: Whether to append newlines at each line if needed. (Bears expect a \\n for every line) :param create_tempfile: Whether to save lines in tempfile if needed. :param tempfile_kwargs: Kwargs passed to tempfile.mkstemp(). :param settings: A dictionary of keys and values (both strings) from which settings will be created that will be made available for the tested bear. """ assert isinstance(self, unittest.TestCase) self.assertIsInstance(local_bear, LocalBear, msg='The given bear is not a local bear.') self.assertIsInstance(lines, (list, tuple), msg='The given lines are not a list.') self.assertIsInstance(results, list, msg='The given results are not a list.') if results in [[], ()]: msg = ("The local bear '{}' yields a result although it " "shouldn't.".format(local_bear.__class__.__name__)) check_order = True else: msg = ("The local bear '{}' doesn't yield the right results." .format(local_bear.__class__.__name__)) if check_order: msg += ' Or the order may be wrong.' 
bear_output = get_results(local_bear, lines, filename=filename, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs, settings=settings) if not check_order: self.assertEqual(sorted(bear_output), sorted(results), msg=msg) else: self.assertEqual(bear_output, results, msg=msg) return bear_output def check_line_result_count(self, local_bear, lines, results_num, filename=None, force_linebreaks=True, create_tempfile=True, tempfile_kwargs={}, settings={}): """ Check many results for each line. :param local_bear: The local bear to check with. :param lines: The lines to check. (List of strings) :param results_num: The expected list of many results each line. :param filename: The filename, if it matters. :param force_linebreaks: Whether to append newlines at each line if needed. (Bears expect a \\n for every line) :param create_tempfile: Whether to save lines in tempfile if needed. :param tempfile_kwargs: Kwargs passed to tempfile.mkstemp(). :param settings: A dictionary of keys and values (both strings) from which settings will be created that will be made available for the tested bear. """ modified_lines = [] for line in lines: stripped_line = line.strip() if stripped_line == '' or stripped_line.startswith('#'): continue modified_lines.append(line) for line, num in zip(modified_lines, results_num): bear_output = get_results(local_bear, [line], filename=filename, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs, settings=settings) self.assertEqual(num, len(bear_output)) def verify_local_bear(bear, valid_files, invalid_files, filename=None, settings={}, force_linebreaks=True, create_tempfile=True, timeout=None, tempfile_kwargs={}): """ Generates a test for a local bear by checking the given valid and invalid file contents. Simply use it on your module level like: YourTestName = verify_local_bear(YourBear, (['valid line'],), (['invalid line'],)) :param bear: The Bear class to test. :param valid_files: An iterable of files as a string list that won't yield results. :param invalid_files: An iterable of files as a string list that must yield results. :param filename: The filename to use for valid and invalid files. :param settings: A dictionary of keys and values (both string) from which settings will be created that will be made available for the tested bear. :param force_linebreaks: Whether to append newlines at each line if needed. (Bears expect a \\n for every line) :param create_tempfile: Whether to save lines in tempfile if needed. :param timeout: The total time to run the test for. :param tempfile_kwargs: Kwargs passed to tempfile.mkstemp() if tempfile needs to be created. :return: A unittest.TestCase object. 
""" @pytest.mark.timeout(timeout) @generate_skip_decorator(bear) class LocalBearTest(LocalBearTestHelper): def setUp(self): self.section = Section('name') self.uut = bear(self.section, queue.Queue()) for name, value in settings.items(): self.section.append(Setting(name, value)) def test_valid_files(self): self.assertIsInstance(valid_files, (list, tuple)) for file in valid_files: self.check_validity(self.uut, file.splitlines(keepends=True), filename, valid=True, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs) def test_invalid_files(self): self.assertIsInstance(invalid_files, (list, tuple)) for file in invalid_files: self.check_validity(self.uut, file.splitlines(keepends=True), filename, valid=False, force_linebreaks=force_linebreaks, create_tempfile=create_tempfile, tempfile_kwargs=tempfile_kwargs) return LocalBearTest
{ "content_hash": "d43da1c17714817571e140a9b3ba88f1", "timestamp": "", "source": "github", "line_count": 346, "max_line_length": 79, "avg_line_length": 45.283236994219656, "alnum_prop": 0.4951493489915752, "repo_name": "IPMITMO/statan", "id": "958c450c393c5489b16ee0eb513d2485e94c3a62", "size": "15668", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "coala/coalib/testing/LocalBearTestHelper.py", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "101" }, { "name": "Batchfile", "bytes": "10931" }, { "name": "C", "bytes": "28190" }, { "name": "C#", "bytes": "45474" }, { "name": "C++", "bytes": "335" }, { "name": "CSS", "bytes": "6631" }, { "name": "Go", "bytes": "96" }, { "name": "HTML", "bytes": "1564" }, { "name": "Java", "bytes": "592" }, { "name": "JavaScript", "bytes": "472227" }, { "name": "Makefile", "bytes": "15304" }, { "name": "PHP", "bytes": "1804" }, { "name": "Python", "bytes": "2312447" }, { "name": "Ruby", "bytes": "447" }, { "name": "Shell", "bytes": "12706" } ], "symlink_target": "" }
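A concrete instance of the module-level usage described in verify_local_bear's docstring; WhitespaceBear is a hypothetical bear class, and the file contents and setting name are arbitrary.

WhitespaceBearTest = verify_local_bear(
    WhitespaceBear,
    valid_files=('x = 1\n',),
    invalid_files=('x = 1 \n',),  # trailing blank should yield a result
    settings={'allow_trailing_whitespace': 'False'})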
__version__ = "0.0.1" from .interact import StaticInteract from .widgets import RadioWidget, RangeWidget, DropDownWidget
{ "content_hash": "4842593e9b49d40fd71fa8be3bfaf0f2", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 61, "avg_line_length": 30.5, "alnum_prop": 0.7786885245901639, "repo_name": "jakevdp/ipywidgets-static", "id": "eebe2cc357b75e4d1564479138951b80ab9210ae", "size": "122", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "ipywidgets/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Jupyter Notebook", "bytes": "608440" }, { "name": "Python", "bytes": "10929" } ], "symlink_target": "" }
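A minimal sketch of the exported API; the RangeWidget(min, max, step) signature is assumed from the package's examples. StaticInteract pre-renders one output per widget state, so the object below displays an interactive figure when evaluated in a notebook cell.

import numpy as np
import matplotlib.pyplot as plt

def plot(amplitude):
    fig, ax = plt.subplots()
    x = np.linspace(0, 10, 200)
    ax.plot(x, amplitude * np.sin(x))
    return fig

StaticInteract(plot, amplitude=RangeWidget(1, 5, 1))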
import hashlib
import os

from eventlet import tpool
import walkdir

from .bep import protocol
from .bep import messages


NOTHING_SHA = hashlib.sha256().digest()


def _hash_file(stream):
    """Split a stream into BLOCK_SIZE chunks and hash each one."""
    blocks = []
    add_block = blocks.append
    offset = 0

    data = stream.read(protocol.BLOCK_SIZE)
    while data:
        sha = hashlib.sha256(data).digest()
        size = len(data)

        add_block(messages.BlockInfo(sha, size, offset))

        offset = offset + size
        data = stream.read(protocol.BLOCK_SIZE)

    if not blocks:
        # An empty file still carries one block: the hash of nothing.
        add_block(messages.BlockInfo(NOTHING_SHA, 0, 0))

    return blocks


def hash_file(file_path):
    with open(file_path, mode='rb') as stream:
        return _hash_file(stream)


class Walker(object):
    def __init__(self, path):
        self.path = path

    def walk(self):
        walk_iter = walkdir.filtered_walk(self.path)

        for dirpath, subdirs, files in walk_iter:
            for fname in files:
                real_path = os.path.abspath(os.path.join(dirpath, fname))

                if os.path.islink(real_path):
                    target = os.readlink(real_path)
                    # The original source is truncated here; the yields below
                    # are an assumed minimal completion: symlinks carry their
                    # target instead of block hashes, and regular files are
                    # hashed in the OS thread pool so the eventlet hub is not
                    # blocked while reading.
                    yield real_path, target, []
                else:
                    yield real_path, None, tpool.execute(hash_file, real_path)
{ "content_hash": "8bf45c1abb6cd3f593b4965e81905f55", "timestamp": "", "source": "github", "line_count": 51, "max_line_length": 73, "avg_line_length": 22.176470588235293, "alnum_prop": 0.614500442086649, "repo_name": "jkoelker/syncthang", "id": "e348734f3e0cb3916341e0db415608c2d034e48d", "size": "1156", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "syncthang/fs.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "30755" } ], "symlink_target": "" }
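hash_file above chunks a file into protocol.BLOCK_SIZE blocks and returns one BlockInfo(sha256, size, offset) per chunk; '/tmp/example' below is just a placeholder path.

for block in hash_file('/tmp/example'):
    print(block)  # one BlockInfo per block; empty files get the NOTHING_SHA block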
''' Given a valgrind XML file, parses errors and uniques them.''' import gdb_helper from collections import defaultdict import hashlib import logging import optparse import os import re import subprocess import sys import time from xml.dom.minidom import parse from xml.parsers.expat import ExpatError import common # Global symbol table (yuck) TheAddressTable = None # These are regexps that define functions (using C++ mangled names) # we don't want to see in stack traces while pretty printing # or generating suppressions. # Just stop printing the stack/suppression frames when the current one # matches any of these. _BORING_CALLERS = common.BoringCallers(mangled=True, use_re_wildcards=True) def getTextOf(top_node, name): ''' Returns all text in all DOM nodes with a certain |name| that are children of |top_node|. ''' text = "" for nodes_named in top_node.getElementsByTagName(name): text += "".join([node.data for node in nodes_named.childNodes if node.nodeType == node.TEXT_NODE]) return text def getCDATAOf(top_node, name): ''' Returns all CDATA in all DOM nodes with a certain |name| that are children of |top_node|. ''' text = "" for nodes_named in top_node.getElementsByTagName(name): text += "".join([node.data for node in nodes_named.childNodes if node.nodeType == node.CDATA_SECTION_NODE]) if (text == ""): return None return text def shortenFilePath(source_dir, directory): '''Returns a string with the string prefix |source_dir| removed from |directory|.''' prefixes_to_cut = ["build/src/", "valgrind/coregrind/", "out/Release/../../"] if source_dir: prefixes_to_cut.append(source_dir) for p in prefixes_to_cut: index = directory.rfind(p) if index != -1: directory = directory[index + len(p):] return directory # Constants that give real names to the abbreviations in valgrind XML output. INSTRUCTION_POINTER = "ip" OBJECT_FILE = "obj" FUNCTION_NAME = "fn" SRC_FILE_DIR = "dir" SRC_FILE_NAME = "file" SRC_LINE = "line" def gatherFrames(node, source_dir): frames = [] for frame in node.getElementsByTagName("frame"): frame_dict = { INSTRUCTION_POINTER : getTextOf(frame, INSTRUCTION_POINTER), OBJECT_FILE : getTextOf(frame, OBJECT_FILE), FUNCTION_NAME : getTextOf(frame, FUNCTION_NAME), SRC_FILE_DIR : shortenFilePath( source_dir, getTextOf(frame, SRC_FILE_DIR)), SRC_FILE_NAME : getTextOf(frame, SRC_FILE_NAME), SRC_LINE : getTextOf(frame, SRC_LINE) } # Ignore this frame and all the following if it's a "boring" function. enough_frames = False for regexp in _BORING_CALLERS: if re.match("^%s$" % regexp, frame_dict[FUNCTION_NAME]): enough_frames = True break if enough_frames: break frames += [frame_dict] global TheAddressTable if TheAddressTable != None and frame_dict[SRC_LINE] == "": # Try using gdb TheAddressTable.Add(frame_dict[OBJECT_FILE], frame_dict[INSTRUCTION_POINTER]) return frames class ValgrindError: ''' Takes a <DOM Element: error> node and reads all the data from it. A ValgrindError is immutable and is hashed on its pretty printed output. ''' def __init__(self, source_dir, error_node, commandline, testcase): ''' Copies all the relevant information out of the DOM and into object properties. Args: error_node: The <error></error> DOM node we're extracting from. source_dir: Prefix that should be stripped from the <dir> node. commandline: The command that was run under valgrind testcase: The test case name, if known. 
    '''
    # Valgrind errors contain one <what><stack> pair, plus an optional
    # <auxwhat><stack> pair, plus an optional <origin><what><stack></origin>,
    # plus (since 3.5.0) a <suppression></suppression> pair.
    # (Origin is nicely enclosed; too bad the other two aren't.)
    # The most common way to see all three in one report is
    # a syscall with a parameter that points to uninitialized memory, e.g.
    # Format:
    # <error>
    #   <unique>0x6d</unique>
    #   <tid>1</tid>
    #   <kind>SyscallParam</kind>
    #   <what>Syscall param write(buf) points to uninitialised byte(s)</what>
    #   <stack>
    #     <frame>
    #     ...
    #     </frame>
    #   </stack>
    #   <auxwhat>Address 0x5c9af4f is 7 bytes inside a block of ...</auxwhat>
    #   <stack>
    #     <frame>
    #     ...
    #     </frame>
    #   </stack>
    #   <origin>
    #     <what>Uninitialised value was created by a heap allocation</what>
    #     <stack>
    #       <frame>
    #       ...
    #       </frame>
    #     </stack>
    #   </origin>
    #   <suppression>
    #     <sname>insert_a_suppression_name_here</sname>
    #     <skind>Memcheck:Param</skind>
    #     <skaux>write(buf)</skaux>
    #     <sframe> <fun>__write_nocancel</fun> </sframe>
    #     ...
    #     <sframe> <fun>main</fun> </sframe>
    #     <rawtext>
    #       <![CDATA[
    #       {
    #         <insert_a_suppression_name_here>
    #         Memcheck:Param
    #         write(buf)
    #         fun:__write_nocancel
    #         ...
    #         fun:main
    #       }
    #       ]]>
    #     </rawtext>
    #   </suppression>
    # </error>
    #
    # Each frame looks like this:
    # <frame>
    #   <ip>0x83751BC</ip>
    #   <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj>
    #   <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn>
    #   <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir>
    #   <file>gtest-internal-inl.h</file>
    #   <line>655</line>
    # </frame>
    # although the dir, file, and line elements are missing if there is
    # no debug info.

    self._kind = getTextOf(error_node, "kind")
    self._backtraces = []
    self._suppression = None
    self._commandline = commandline
    self._testcase = testcase
    self._additional = []

    # Iterate through the nodes, parsing <what|auxwhat><stack> pairs.
    description = None
    for node in error_node.childNodes:
      if node.localName == "what" or node.localName == "auxwhat":
        description = "".join([n.data for n in node.childNodes
                               if n.nodeType == n.TEXT_NODE])
      elif node.localName == "xwhat":
        description = getTextOf(node, "text")
      elif node.localName == "stack":
        assert description
        self._backtraces.append([description, gatherFrames(node, source_dir)])
        description = None
      elif node.localName == "origin":
        description = getTextOf(node, "what")
        stack = node.getElementsByTagName("stack")[0]
        frames = gatherFrames(stack, source_dir)
        self._backtraces.append([description, frames])
        description = None
        stack = None
        frames = None
      elif description and node.localName is not None:
        # The latest description has no stack, e.g. "Address 0x28 is unknown"
        self._additional.append(description)
        description = None

      if node.localName == "suppression":
        self._suppression = getCDATAOf(node, "rawtext")

  def __str__(self):
    ''' Pretty print the type and backtrace(s) of this specific error,
        including suppression (which is just a mangled backtrace).'''
    output = ""
    output += "\n" # Make sure the ### is at the beginning of line.
output += "### BEGIN MEMORY TOOL REPORT (error hash=#%016X#)\n" % \ self.ErrorHash() if (self._commandline): output += self._commandline + "\n" output += self._kind + "\n" for backtrace in self._backtraces: output += backtrace[0] + "\n" filter = subprocess.Popen("c++filt -n", stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, close_fds=True) buf = "" for frame in backtrace[1]: buf += (frame[FUNCTION_NAME] or frame[INSTRUCTION_POINTER]) + "\n" (stdoutbuf, stderrbuf) = filter.communicate(buf.encode('latin-1')) demangled_names = stdoutbuf.split("\n") i = 0 for frame in backtrace[1]: output += (" " + demangled_names[i]) i = i + 1 global TheAddressTable if TheAddressTable != None and frame[SRC_FILE_DIR] == "": # Try using gdb foo = TheAddressTable.GetFileLine(frame[OBJECT_FILE], frame[INSTRUCTION_POINTER]) if foo[0] != None: output += (" (" + foo[0] + ":" + foo[1] + ")") elif frame[SRC_FILE_DIR] != "": output += (" (" + frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME] + ":" + frame[SRC_LINE] + ")") else: output += " (" + frame[OBJECT_FILE] + ")" output += "\n" for additional in self._additional: output += additional + "\n" assert self._suppression != None, "Your Valgrind doesn't generate " \ "suppressions - is it too old?" if self._testcase: output += "The report came from the `%s` test.\n" % self._testcase output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash() output += (" For more info on using suppressions see " "http://dev.chromium.org/developers/tree-sheriffs/sheriff-details-chromium/memory-sheriff#TOC-Suppressing-memory-reports") # Widen suppression slightly to make portable between mac and linux # TODO(timurrrr): Oops, these transformations should happen # BEFORE calculating the hash! supp = self._suppression; supp = supp.replace("fun:_Znwj", "fun:_Znw*") supp = supp.replace("fun:_Znwm", "fun:_Znw*") supp = supp.replace("fun:_Znaj", "fun:_Zna*") supp = supp.replace("fun:_Znam", "fun:_Zna*") # Make suppressions even less platform-dependent. for sz in [1, 2, 4, 8]: supp = supp.replace("Memcheck:Addr%d" % sz, "Memcheck:Unaddressable") supp = supp.replace("Memcheck:Value%d" % sz, "Memcheck:Uninitialized") supp = supp.replace("Memcheck:Cond", "Memcheck:Uninitialized") # Split into lines so we can enforce length limits supplines = supp.split("\n") supp = None # to avoid re-use # Truncate at line 26 (VG_MAX_SUPP_CALLERS plus 2 for name and type) # or at the first 'boring' caller. # (https://bugs.kde.org/show_bug.cgi?id=199468 proposes raising # VG_MAX_SUPP_CALLERS, but we're probably fine with it as is.) newlen = min(26, len(supplines)); # Drop boring frames and all the following. enough_frames = False for frameno in range(newlen): for boring_caller in _BORING_CALLERS: if re.match("^ +fun:%s$" % boring_caller, supplines[frameno]): newlen = frameno enough_frames = True break if enough_frames: break if (len(supplines) > newlen): supplines = supplines[0:newlen] supplines.append("}") for frame in range(len(supplines)): # Replace the always-changing anonymous namespace prefix with "*". m = re.match("( +fun:)_ZN.*_GLOBAL__N_.*\.cc_" + "[0-9a-fA-F]{8}_[0-9a-fA-F]{8}(.*)", supplines[frame]) if m: supplines[frame] = "*".join(m.groups()) output += "\n".join(supplines) + "\n" output += "### END MEMORY TOOL REPORT (error hash=#%016X#)\n" % \ self.ErrorHash() return output def UniqueString(self): ''' String to use for object identity. 
    Don't print this, use str(obj) instead.'''
    rep = self._kind + " "
    for backtrace in self._backtraces:
      for frame in backtrace[1]:
        rep += frame[FUNCTION_NAME]

        if frame[SRC_FILE_DIR] != "":
          rep += frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME]
        else:
          rep += frame[OBJECT_FILE]

    return rep

  # This is a device-independent hash identifying the suppression.
  # By printing out this hash we can find duplicate reports between tests and
  # different shards running on multiple buildbots.
  def ErrorHash(self):
    return int(hashlib.md5(self.UniqueString()).hexdigest()[:16], 16)

  def __hash__(self):
    return hash(self.UniqueString())

  def __eq__(self, rhs):
    return self.UniqueString() == rhs

def log_is_finished(f, force_finish):
  f.seek(0)
  prev_line = ""
  while True:
    line = f.readline()
    if line == "":
      if not force_finish:
        return False
      # Okay, the log is not finished but we can make it up to be parseable:
      if prev_line.strip() in ["</error>", "</errorcounts>", "</status>"]:
        f.write("</valgrindoutput>\n")
        return True
      return False
    if '</valgrindoutput>' in line:
      # Valgrind often has garbage after </valgrindoutput> upon crash.
      f.truncate()
      return True
    prev_line = line

class MemcheckAnalyzer:
  ''' Given a set of Valgrind XML files, parse all the errors out of them,
  unique them and output the results.'''

  SANITY_TEST_SUPPRESSIONS = {
      "Memcheck sanity test 01 (memory leak).": 1,
      "Memcheck sanity test 02 (malloc/read left).": 1,
      "Memcheck sanity test 03 (malloc/read right).": 1,
      "Memcheck sanity test 04 (malloc/write left).": 1,
      "Memcheck sanity test 05 (malloc/write right).": 1,
      "Memcheck sanity test 06 (new/read left).": 1,
      "Memcheck sanity test 07 (new/read right).": 1,
      "Memcheck sanity test 08 (new/write left).": 1,
      "Memcheck sanity test 09 (new/write right).": 1,
      "Memcheck sanity test 10 (write after free).": 1,
      "Memcheck sanity test 11 (write after delete).": 1,
      "Memcheck sanity test 12 (array deleted without []).": 1,
      "Memcheck sanity test 13 (single element deleted with []).": 1,
      "Memcheck sanity test 14 (malloc/read uninit).": 1,
      "Memcheck sanity test 15 (new/read uninit).": 1,
  }

  # Max time to wait for memcheck logs to complete.
  LOG_COMPLETION_TIMEOUT = 180.0

  def __init__(self, source_dir, show_all_leaks=False, use_gdb=False):
    '''Create a parser for Memcheck logs.

    Args:
      source_dir: Path to top of source tree for this build.
      show_all_leaks: Whether to show even less important leaks.
      use_gdb: Whether to use gdb to resolve source filenames and line
          numbers in the report stacktraces.
    '''
    self._source_dir = source_dir
    self._show_all_leaks = show_all_leaks
    self._use_gdb = use_gdb

    # Contains the set of unique errors.
    self._errors = set()

    # Contains the time when we started analyzing the first log file.
    # This variable is used to skip incomplete logs after some timeout.
    self._analyze_start_time = None

  def Report(self, files, testcase, check_sanity=False):
    '''Reads in a set of files and prints a Memcheck report.

    Args:
      files: A list of filenames.
      testcase: The test case name, if known.
      check_sanity: If True, search for SANITY_TEST_SUPPRESSIONS.
    '''
    # Beyond the detailed errors parsed by ValgrindError above,
    # the xml files contain records describing suppressions that were used:
    # <suppcounts>
    #  <pair>
    #    <count>28</count>
    #    <name>pango_font_leak_todo</name>
    #  </pair>
    #  <pair>
    #    <count>378</count>
    #    <name>bug_13243</name>
    #  </pair>
    # </suppcounts>
    # Collect these and print them at the end.
# # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in, # the file also includes records of the form # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj> # giving the filename and load address of each binary that was mapped # into the process. global TheAddressTable if self._use_gdb: TheAddressTable = gdb_helper.AddressTable() else: TheAddressTable = None cur_report_errors = set() suppcounts = defaultdict(int) badfiles = set() if self._analyze_start_time == None: self._analyze_start_time = time.time() start_time = self._analyze_start_time parse_failed = False for file in files: # Wait up to three minutes for valgrind to finish writing all files, # but after that, just skip incomplete files and warn. f = open(file, "r+") pid = re.match(".*\.([0-9]+)$", file) if pid: pid = pid.groups()[0] found = False running = True firstrun = True skip = False origsize = os.path.getsize(file) while (running and not found and not skip and (firstrun or ((time.time() - start_time) < self.LOG_COMPLETION_TIMEOUT))): firstrun = False f.seek(0) if pid: # Make sure the process is still running so we don't wait for # 3 minutes if it was killed. See http://crbug.com/17453 ps_out = subprocess.Popen("ps p %s" % pid, shell=True, stdout=subprocess.PIPE).stdout if len(ps_out.readlines()) < 2: running = False else: skip = True running = False found = log_is_finished(f, False) if not running and not found: logging.warn("Valgrind process PID = %s is not running but its " "XML log has not been finished correctly.\n" "Make it up by adding some closing tags manually." % pid) found = log_is_finished(f, not running) if running and not found: time.sleep(1) f.close() if not found: badfiles.add(file) else: newsize = os.path.getsize(file) if origsize > newsize+1: logging.warn(str(origsize - newsize) + " bytes of junk were after </valgrindoutput> in %s!" % file) try: parsed_file = parse(file); except ExpatError, e: parse_failed = True logging.warn("could not parse %s: %s" % (file, e)) lineno = e.lineno - 1 context_lines = 5 context_start = max(0, lineno - context_lines) context_end = lineno + context_lines + 1 context_file = open(file, "r") for i in range(0, context_start): context_file.readline() for i in range(context_start, context_end): context_data = context_file.readline().rstrip() if i != lineno: logging.warn(" %s" % context_data) else: logging.warn("> %s" % context_data) context_file.close() continue if TheAddressTable != None: load_objs = parsed_file.getElementsByTagName("load_obj") for load_obj in load_objs: obj = getTextOf(load_obj, "obj") ip = getTextOf(load_obj, "ip") TheAddressTable.AddBinaryAt(obj, ip) commandline = None preamble = parsed_file.getElementsByTagName("preamble")[0]; for node in preamble.getElementsByTagName("line"): if node.localName == "line": for x in node.childNodes: if x.nodeType == node.TEXT_NODE and "Command" in x.data: commandline = x.data break raw_errors = parsed_file.getElementsByTagName("error") for raw_error in raw_errors: # Ignore "possible" leaks for now by default. if (self._show_all_leaks or getTextOf(raw_error, "kind") != "Leak_PossiblyLost"): error = ValgrindError(self._source_dir, raw_error, commandline, testcase) if error not in cur_report_errors: # We haven't seen such errors doing this report yet... if error in self._errors: # ... but we saw it in earlier reports, e.g. previous UI test cur_report_errors.add("This error was already printed in " "some other test, see 'hash=#%016X#'" % \ error.ErrorHash()) else: # ... 
and we haven't seen it in other tests as well
              self._errors.add(error)
              cur_report_errors.add(error)

      suppcountlist = parsed_file.getElementsByTagName("suppcounts")
      if len(suppcountlist) > 0:
        suppcountlist = suppcountlist[0]
        for node in suppcountlist.getElementsByTagName("pair"):
          count = getTextOf(node, "count")
          name = getTextOf(node, "name")
          suppcounts[name] += int(count)

    if len(badfiles) > 0:
      logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
      for file in badfiles:
        logging.warn("Last 20 lines of %s :" % file)
        os.system("tail -n 20 '%s' 1>&2" % file)

    if parse_failed:
      logging.error("FAIL! Couldn't parse Valgrind output file")
      return -2

    common.PrintUsedSuppressionsList(suppcounts)

    retcode = 0
    if cur_report_errors:
      logging.error("FAIL! There were %s errors: " % len(cur_report_errors))

      if TheAddressTable != None:
        TheAddressTable.ResolveAll()

      for error in cur_report_errors:
        logging.error(error)

      retcode = -1

    # Report tool's insanity even if there were errors.
    if check_sanity:
      remaining_sanity_supp = MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS
      for (name, count) in suppcounts.iteritems():
        # Workaround for http://crbug.com/334074
        if (name in remaining_sanity_supp and
            remaining_sanity_supp[name] <= count):
          del remaining_sanity_supp[name]
      if remaining_sanity_supp:
        logging.error("FAIL! Sanity check failed!")
        logging.info("The following test errors were not handled: ")
        for (name, count) in remaining_sanity_supp.iteritems():
          logging.info("  * %dx %s" % (count, name))
        retcode = -3

    if retcode != 0:
      return retcode

    logging.info("PASS! No errors found!")
    return 0


def _main():
  '''For testing only. The MemcheckAnalyzer class should be imported
  instead.'''
  parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
  parser.add_option("", "--source-dir",
                    help="path to top of source tree for this build "
                         "(used to normalize source paths in baseline)")

  (options, args) = parser.parse_args()
  if len(args) == 0:
    parser.error("no filename specified")
  filenames = args

  analyzer = MemcheckAnalyzer(options.source_dir, use_gdb=True)
  return analyzer.Report(filenames, None)


if __name__ == "__main__":
  sys.exit(_main())
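# A minimal usage sketch of the analyzer defined above (the XML path and test
# name here are hypothetical):
#
#   analyzer = MemcheckAnalyzer("/path/to/src", show_all_leaks=False,
#                               use_gdb=False)
#   retcode = analyzer.Report(["out/valgrind.12345.xml"], "base_unittests")
#
# Report() returns 0 when no errors were found, -1 when errors were reported,
# -2 when an XML log could not be parsed and -3 when the sanity check failed.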
{ "content_hash": "ee4f434382ed15e39dfd233876929252", "timestamp": "", "source": "github", "line_count": 633, "max_line_length": 137, "avg_line_length": 35.88309636650869, "alnum_prop": 0.597913181297878, "repo_name": "wangcy6/storm_app", "id": "80e85eb4ab1d4d94c2087d2fa366c5f2fa5987a5", "size": "23153", "binary": false, "copies": "6", "ref": "refs/heads/master", "path": "frame/c++/webrtc-master/tools_webrtc/valgrind/memcheck_analyze.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ActionScript", "bytes": "86225" }, { "name": "Assembly", "bytes": "4834" }, { "name": "Batchfile", "bytes": "50141" }, { "name": "C", "bytes": "9700081" }, { "name": "C#", "bytes": "1587148" }, { "name": "C++", "bytes": "14378340" }, { "name": "CMake", "bytes": "756439" }, { "name": "CSS", "bytes": "59712" }, { "name": "Clojure", "bytes": "535480" }, { "name": "DTrace", "bytes": "147" }, { "name": "Fancy", "bytes": "6234" }, { "name": "FreeMarker", "bytes": "3512" }, { "name": "Go", "bytes": "27069" }, { "name": "Groovy", "bytes": "1755" }, { "name": "HTML", "bytes": "1235479" }, { "name": "Java", "bytes": "41653938" }, { "name": "JavaScript", "bytes": "260093" }, { "name": "Lua", "bytes": "11887" }, { "name": "M4", "bytes": "96283" }, { "name": "Makefile", "bytes": "977879" }, { "name": "NSIS", "bytes": "6522" }, { "name": "Objective-C", "bytes": "324010" }, { "name": "PHP", "bytes": "348909" }, { "name": "Perl", "bytes": "182487" }, { "name": "PowerShell", "bytes": "19465" }, { "name": "Prolog", "bytes": "243" }, { "name": "Python", "bytes": "3649738" }, { "name": "QML", "bytes": "9975" }, { "name": "QMake", "bytes": "63106" }, { "name": "Roff", "bytes": "12319" }, { "name": "Ruby", "bytes": "858066" }, { "name": "Scala", "bytes": "5203874" }, { "name": "Shell", "bytes": "714435" }, { "name": "Smarty", "bytes": "1047" }, { "name": "Swift", "bytes": "3486" }, { "name": "Tcl", "bytes": "492616" }, { "name": "Thrift", "bytes": "31449" }, { "name": "XS", "bytes": "20183" }, { "name": "XSLT", "bytes": "8784" } ], "symlink_target": "" }
import requests from allauth.socialaccount import providers from allauth.socialaccount.providers.oauth2.views import (OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView) from .provider import LinkedInOAuth2Provider class LinkedInOAuth2Adapter(OAuth2Adapter): provider_id = LinkedInOAuth2Provider.id access_token_url = 'https://api.linkedin.com/uas/oauth2/accessToken' authorize_url = 'https://www.linkedin.com/uas/oauth2/authorization' profile_url = 'https://api.linkedin.com/v1/people/~' supports_state = False # See: # http://developer.linkedin.com/forum/unauthorized-invalid-or-expired-token-immediately-after-receiving-oauth2-token?page=1 # noqa access_token_method = 'GET' def complete_login(self, request, app, token, **kwargs): extra_data = self.get_user_info(token) return self.get_provider().sociallogin_from_response(request, extra_data) def get_user_info(self, token): fields = providers.registry \ .by_id(LinkedInOAuth2Provider.id) \ .get_profile_fields() url = self.profile_url + ':(%s)?format=json' % ','.join(fields) resp = requests.get(url, params={'oauth2_access_token': token.token}) return resp.json() oauth2_login = OAuth2LoginView.adapter_view(LinkedInOAuth2Adapter) oauth2_callback = OAuth2CallbackView.adapter_view(LinkedInOAuth2Adapter)
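# Illustrative sketch of the request built by get_user_info() above; the field
# names here are hypothetical examples, the real list comes from the
# provider's get_profile_fields():
#
#   fields = ['id', 'first-name', 'last-name', 'email-address']
#   url = adapter.profile_url + ':(%s)?format=json' % ','.join(fields)
#   # -> https://api.linkedin.com/v1/people/~:(id,first-name,last-name,
#   #    email-address)?format=json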
{ "content_hash": "6900938ab66d0add347e5505335c58dc", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 134, "avg_line_length": 46.6875, "alnum_prop": 0.6680053547523427, "repo_name": "agconti/njode", "id": "b06996e443f8c56acf8f24f030443bfcef220224", "size": "1494", "binary": false, "copies": "9", "ref": "refs/heads/master", "path": "env/lib/python2.7/site-packages/allauth/socialaccount/providers/linkedin_oauth2/views.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "5979" }, { "name": "CSS", "bytes": "137839" }, { "name": "JavaScript", "bytes": "202018" }, { "name": "Makefile", "bytes": "9794" }, { "name": "Python", "bytes": "17227922" }, { "name": "Shell", "bytes": "8848" }, { "name": "TeX", "bytes": "56837" } ], "symlink_target": "" }
import httplib import k.stdlib.urllib2.urlopen_error_message import logging import mock import socket import unittest import urllib2 from StringIO import StringIO from k.stdlib.urllib2 import urlopen class UrlopenTests(unittest.TestCase): def setUp(self): self.response_text = """<!doctype html> <html> <head><title>404 Not Found</title></head> <body bgcolor="white"> <center><h1>404 Not Found</h1></center> <hr><center>nginx/0.7.65</center> </body> </html> """ self.url = "http://www.a.com/" self.fp = StringIO("<!doctype html>\n<html>\n</html>\n") self.headers = httplib.HTTPMessage(StringIO("""Date: Fri, 12 Apr 2013 14:43:25 GMT Server: Apache/2.2.3 (CentOS) Last-Modified: Fri, 04 Jan 2013 01:17:22 GMT Vary: Accept-Encoding Connection: close Transfer-Encoding: chunked Content-Type: text/html; charset=UTF-8 """)) def tearDown(self): pass def test_urlopen_success(self): response = urllib2.addinfourl(self.fp, self.headers, self.url) response.code = 200 response.msg = "OK" with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.return_value = response urlopen(self.url) def test_urlopen_httperror(self): url = self.url error = urllib2.HTTPError(self.url, 404, "Not Found", self.headers, self.fp) #print error.msg # Not Found with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.HTTPError as err: #print err.msg # Not Found (http://www.a.com/) assert err.msg.endswith("({0})".format(self.url)) else: self.fail() def test_urlopen_httperror_2(self): url = urllib2.Request(self.url) error = urllib2.HTTPError(self.url, 404, "Not Found", self.headers, self.fp) #print error.msg # Not Found with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.HTTPError as err: #print err.msg # Not Found (http://www.a.com/) assert err.msg.endswith("({0})".format(self.url)) else: self.fail() def test_urlopen_httperror_3(self): url = self.url fp = StringIO("<!doctype html>\n<html>\n<title>404 Not Found</title>\n</html>\n") error = urllib2.HTTPError(self.url, 404, "Not Found", self.headers, fp) #print error.msg # Not Found with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.HTTPError as err: #print err.msg # Not Found (http://www.a.com/) (Proxy couldn't match the route.) assert err.msg.endswith("({0}) (Proxy couldn't match the route.)".format(self.url)) else: self.fail() def test_urlopen_httperror_4(self): url = self.url fp = StringIO('{"status": "404 Not Found"}') error = urllib2.HTTPError(self.url, 404, "Not Found", self.headers, fp) #print error.msg # Not Found with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.HTTPError as err: print err.msg # Not Found (http://www.a.com/) (Application couldn't match the route.) 
assert err.msg.endswith("({0}) (Application couldn't match the route.)".format(self.url)) else: self.fail() def test_urlopen_urlerror(self): url = self.url error = urllib2.URLError(socket.gaierror(-2, 'Name or service not known'),) #print error.args # (gaierror(-2, 'Name or service not known'),) #print error.reason # [Errno -2] Name or service not known with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.URLError as err: #print err.args # (gaierror(-2, 'Name or service not known'), 'http://www.a.com/') #print err.reason # [Errno -2] Name or service not known (http://www.a.com/) assert err.args[1] == self.url assert err.reason.endswith("({0})".format(self.url)) else: self.fail() def test_urlopen_urlerror_2(self): url = urllib2.Request(self.url) error = urllib2.URLError(socket.gaierror(-2, 'Name or service not known'),) #print error.args # (gaierror(-2, 'Name or service not known'),) #print error.reason # [Errno -2] Name or service not known with mock.patch('urllib2.urlopen') as mock_urlopen: mock_urlopen.side_effect = error try: urlopen(url) except urllib2.URLError as err: #print err.args # (gaierror(-2, 'Name or service not known'), 'http://www.a.com/') #print err.reason # [Errno -2] Name or service not known (http://www.a.com/) assert err.args[1] == self.url assert err.reason.endswith("({0})".format(self.url)) else: self.fail() def test__dump_response(self): response = urllib2.addinfourl(self.fp, self.headers, self.url) response.code = 200 response.msg = "OK" with mock.patch('logging.debug') as mock_debug: k.stdlib.urllib2.urlopen_error_message._dump_response(response) def test__process_response(self): response = mock.Mock() response.read.return_value = self.response_text assert "__iter__" not in dir(response) assert "fileno" not in dir(response) assert "fp" not in dir(response) assert "next" not in dir(response) assert "readline" not in dir(response) assert "readlines" not in dir(response) k.stdlib.urllib2.urlopen_error_message._process_response(response) assert "__iter__" in dir(response) assert "fileno" in dir(response) assert "fp" in dir(response) assert "next" in dir(response) assert "readline" in dir(response) assert "readlines" in dir(response) assert response.fileno() is None expected = [ '<!doctype html>\n', '<html>\n', '<head><title>404 Not Found</title></head>\n', '<body bgcolor="white">\n', '<center><h1>404 Not Found</h1></center>\n', '<hr><center>nginx/0.7.65</center>\n', '</body>\n', '</html>\n', ] actual = response.readlines() assert expected == actual assert response.fileno() is None def test__process_response_2(self): response = mock.Mock() response.read.return_value = self.response_text assert "__iter__" not in dir(response) assert "fileno" not in dir(response) assert "fp" not in dir(response) assert "next" not in dir(response) assert "readline" not in dir(response) assert "readlines" not in dir(response) with mock.patch('k.stdlib.urllib2.urlopen_error_message.StringIO') as mock_stringio: fp = mock.Mock(spec=['read', 'readline', 'fileno']) fp.read = lambda: None fp.readline = lambda: None fp.fileno = lambda: 0 mock_stringio.return_value = fp k.stdlib.urllib2.urlopen_error_message._process_response(response) assert "__iter__" not in dir(response) assert "fileno" in dir(response) assert "fp" in dir(response) assert "next" not in dir(response) assert "readline" in dir(response) assert "readlines" in dir(response) try: response.readlines() except NotImplementedError: pass else: self.fail() assert 
response.fileno() == 0
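# For reference, the behaviour these tests pin down is that the wrapped
# urlopen() appends the offending URL to the error it re-raises, roughly:
#
#   try:
#       urlopen("http://www.a.com/")
#   except urllib2.HTTPError as err:
#       print err.msg  # e.g. "Not Found (http://www.a.com/)"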
{ "content_hash": "587908e446792109da2779f3c3e8fc3b", "timestamp": "", "source": "github", "line_count": 215, "max_line_length": 93, "avg_line_length": 32.097674418604655, "alnum_prop": 0.6826546877264165, "repo_name": "Knewton/k.stdlib", "id": "aba40a4c2d560b63303a239cc4cff2958138b337", "size": "6901", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "k/stdlib/urllib2/tests/test_urlopen_error_message.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "195740" } ], "symlink_target": "" }
import torch import numpy as np path = './checkpoints/linknet/model_best.pth.tar' c = torch.load(path) print c.keys() print c['state_dict']
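# A minimal sketch for inspecting the checkpoint without dumping the whole
# state_dict (assumes the values are torch tensors, as is usual for a model
# checkpoint saved with torch.save):
#
#   for name, tensor in c['state_dict'].items():
#       print name, tuple(tensor.size())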
{ "content_hash": "d46edca4f261ab34e31fcb8fdeef3637", "timestamp": "", "source": "github", "line_count": 8, "max_line_length": 49, "avg_line_length": 17.75, "alnum_prop": 0.7183098591549296, "repo_name": "hzh8311/project", "id": "cb48a11785363d880718eadba4812956db98132d", "size": "142", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "demo.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "89279" } ], "symlink_target": "" }
""" The tests exercise the casting machinery in a more low-level manner. The reason is mostly to test a new implementation of the casting machinery. Unlike most tests in NumPy, these are closer to unit-tests rather than integration tests. """ import pytest import textwrap import enum import itertools import random import numpy as np from numpy.lib.stride_tricks import as_strided from numpy.testing import assert_array_equal from numpy.core._multiarray_umath import _get_castingimpl as get_castingimpl # Simple skips object, parametric and long double (unsupported by struct) simple_dtypes = "?bhilqBHILQefdFD" if np.dtype("l").itemsize != np.dtype("q").itemsize: # Remove l and L, the table was generated with 64bit linux in mind. simple_dtypes = simple_dtypes.replace("l", "").replace("L", "") simple_dtypes = [type(np.dtype(c)) for c in simple_dtypes] def simple_dtype_instances(): for dtype_class in simple_dtypes: dt = dtype_class() yield pytest.param(dt, id=str(dt)) if dt.byteorder != "|": dt = dt.newbyteorder() yield pytest.param(dt, id=str(dt)) def get_expected_stringlength(dtype): """Returns the string length when casting the basic dtypes to strings. """ if dtype == np.bool_: return 5 if dtype.kind in "iu": if dtype.itemsize == 1: length = 3 elif dtype.itemsize == 2: length = 5 elif dtype.itemsize == 4: length = 10 elif dtype.itemsize == 8: length = 20 else: raise AssertionError(f"did not find expected length for {dtype}") if dtype.kind == "i": length += 1 # adds one character for the sign return length # Note: Can't do dtype comparison for longdouble on windows if dtype.char == "g": return 48 elif dtype.char == "G": return 48 * 2 elif dtype.kind == "f": return 32 # also for half apparently. elif dtype.kind == "c": return 32 * 2 raise AssertionError(f"did not find expected length for {dtype}") class Casting(enum.IntEnum): no = 0 equiv = 1 safe = 2 same_kind = 3 unsafe = 4 cast_is_view = 1 << 16 def _get_cancast_table(): table = textwrap.dedent(""" X ? b h i l q B H I L Q e f d g F D G S U V O M m ? # = = = = = = = = = = = = = = = = = = = = = . = b . # = = = = . . . . . = = = = = = = = = = = . = h . ~ # = = = . . . . . ~ = = = = = = = = = = . = i . ~ ~ # = = . . . . . ~ ~ = = ~ = = = = = = . = l . ~ ~ ~ # # . . . . . ~ ~ = = ~ = = = = = = . = q . ~ ~ ~ # # . . . . . ~ ~ = = ~ = = = = = = . = B . ~ = = = = # = = = = = = = = = = = = = = = . = H . ~ ~ = = = ~ # = = = ~ = = = = = = = = = = . = I . ~ ~ ~ = = ~ ~ # = = ~ ~ = = ~ = = = = = = . = L . ~ ~ ~ ~ ~ ~ ~ ~ # # ~ ~ = = ~ = = = = = = . ~ Q . ~ ~ ~ ~ ~ ~ ~ ~ # # ~ ~ = = ~ = = = = = = . ~ e . . . . . . . . . . . # = = = = = = = = = = . . f . . . . . . . . . . . ~ # = = = = = = = = = . . d . . . . . . . . . . . ~ ~ # = ~ = = = = = = . . g . . . . . . . . . . . ~ ~ ~ # ~ ~ = = = = = . . F . . . . . . . . . . . . . . . # = = = = = = . . D . . . . . . . . . . . . . . . ~ # = = = = = . . G . . . . . . . . . . . . . . . ~ ~ # = = = = . . S . . . . . . . . . . . . . . . . . . # = = = . . U . . . . . . . . . . . . . . . . . . . # = = . . V . . . . . . . . . . . . . . . . . . . . # = . . O . . . . . . . . . . . . . . . . . . . . = # . . M . . . . . . . . . . . . . . . . . . . . = = # . m . . . . . . . . . . . . . . . . . . . . = = . 
# """).strip().split("\n") dtypes = [type(np.dtype(c)) for c in table[0][2::2]] convert_cast = {".": Casting.unsafe, "~": Casting.same_kind, "=": Casting.safe, "#": Casting.equiv, " ": -1} cancast = {} for from_dt, row in zip(dtypes, table[1:]): cancast[from_dt] = {} for to_dt, c in zip(dtypes, row[2::2]): cancast[from_dt][to_dt] = convert_cast[c] return cancast CAST_TABLE = _get_cancast_table() class TestChanges: """ These test cases exercise some behaviour changes """ @pytest.mark.parametrize("string", ["S", "U"]) @pytest.mark.parametrize("floating", ["e", "f", "d", "g"]) def test_float_to_string(self, floating, string): assert np.can_cast(floating, string) # 100 is long enough to hold any formatted floating assert np.can_cast(floating, f"{string}100") def test_to_void(self): # But in general, we do consider these safe: assert np.can_cast("d", "V") assert np.can_cast("S20", "V") # Do not consider it a safe cast if the void is too smaller: assert not np.can_cast("d", "V1") assert not np.can_cast("S20", "V1") assert not np.can_cast("U1", "V1") # Structured to unstructured is just like any other: assert np.can_cast("d,i", "V", casting="same_kind") # Unstructured void to unstructured is actually no cast at all: assert np.can_cast("V3", "V", casting="no") assert np.can_cast("V0", "V", casting="no") class TestCasting: size = 1500 # Best larger than NPY_LOWLEVEL_BUFFER_BLOCKSIZE * itemsize def get_data(self, dtype1, dtype2): if dtype2 is None or dtype1.itemsize >= dtype2.itemsize: length = self.size // dtype1.itemsize else: length = self.size // dtype2.itemsize # Assume that the base array is well enough aligned for all inputs. arr1 = np.empty(length, dtype=dtype1) assert arr1.flags.c_contiguous assert arr1.flags.aligned values = [random.randrange(-128, 128) for _ in range(length)] for i, value in enumerate(values): # Use item assignment to ensure this is not using casting: arr1[i] = value if dtype2 is None: if dtype1.char == "?": values = [bool(v) for v in values] return arr1, values if dtype2.char == "?": values = [bool(v) for v in values] arr2 = np.empty(length, dtype=dtype2) assert arr2.flags.c_contiguous assert arr2.flags.aligned for i, value in enumerate(values): # Use item assignment to ensure this is not using casting: arr2[i] = value return arr1, arr2, values def get_data_variation(self, arr1, arr2, aligned=True, contig=True): """ Returns a copy of arr1 that may be non-contiguous or unaligned, and a matching array for arr2 (although not a copy). """ if contig: stride1 = arr1.dtype.itemsize stride2 = arr2.dtype.itemsize elif aligned: stride1 = 2 * arr1.dtype.itemsize stride2 = 2 * arr2.dtype.itemsize else: stride1 = arr1.dtype.itemsize + 1 stride2 = arr2.dtype.itemsize + 1 max_size1 = len(arr1) * 3 * arr1.dtype.itemsize + 1 max_size2 = len(arr2) * 3 * arr2.dtype.itemsize + 1 from_bytes = np.zeros(max_size1, dtype=np.uint8) to_bytes = np.zeros(max_size2, dtype=np.uint8) # Sanity check that the above is large enough: assert stride1 * len(arr1) <= from_bytes.nbytes assert stride2 * len(arr2) <= to_bytes.nbytes if aligned: new1 = as_strided(from_bytes[:-1].view(arr1.dtype), arr1.shape, (stride1,)) new2 = as_strided(to_bytes[:-1].view(arr2.dtype), arr2.shape, (stride2,)) else: new1 = as_strided(from_bytes[1:].view(arr1.dtype), arr1.shape, (stride1,)) new2 = as_strided(to_bytes[1:].view(arr2.dtype), arr2.shape, (stride2,)) new1[...] 
= arr1 if not contig: # Ensure we did not overwrite bytes that should not be written: offset = arr1.dtype.itemsize if aligned else 0 buf = from_bytes[offset::stride1].tobytes() assert buf.count(b"\0") == len(buf) if contig: assert new1.flags.c_contiguous assert new2.flags.c_contiguous else: assert not new1.flags.c_contiguous assert not new2.flags.c_contiguous if aligned: assert new1.flags.aligned assert new2.flags.aligned else: assert not new1.flags.aligned or new1.dtype.alignment == 1 assert not new2.flags.aligned or new2.dtype.alignment == 1 return new1, new2 @pytest.mark.parametrize("from_Dt", simple_dtypes) def test_simple_cancast(self, from_Dt): for to_Dt in simple_dtypes: cast = get_castingimpl(from_Dt, to_Dt) for from_dt in [from_Dt(), from_Dt().newbyteorder()]: default = cast._resolve_descriptors((from_dt, None))[1][1] assert default == to_Dt() del default for to_dt in [to_Dt(), to_Dt().newbyteorder()]: casting, (from_res, to_res) = cast._resolve_descriptors( (from_dt, to_dt)) assert(type(from_res) == from_Dt) assert(type(to_res) == to_Dt) if casting & Casting.cast_is_view: # If a view is acceptable, this is "no" casting # and byte order must be matching. assert casting == Casting.no | Casting.cast_is_view # The above table lists this as "equivalent" assert Casting.equiv == CAST_TABLE[from_Dt][to_Dt] # Note that to_res may not be the same as from_dt assert from_res.isnative == to_res.isnative else: if from_Dt == to_Dt: # Note that to_res may not be the same as from_dt assert from_res.isnative != to_res.isnative assert casting == CAST_TABLE[from_Dt][to_Dt] if from_Dt is to_Dt: assert(from_dt is from_res) assert(to_dt is to_res) @pytest.mark.filterwarnings("ignore::numpy.ComplexWarning") @pytest.mark.parametrize("from_dt", simple_dtype_instances()) def test_simple_direct_casts(self, from_dt): """ This test checks numeric direct casts for dtypes supported also by the struct module (plus complex). It tries to be test a wide range of inputs, but skips over possibly undefined behaviour (e.g. int rollover). Longdouble and CLongdouble are tested, but only using double precision. If this test creates issues, it should possibly just be simplified or even removed (checking whether unaligned/non-contiguous casts give the same results is useful, though). """ for to_dt in simple_dtype_instances(): to_dt = to_dt.values[0] cast = get_castingimpl(type(from_dt), type(to_dt)) casting, (from_res, to_res) = cast._resolve_descriptors( (from_dt, to_dt)) if from_res is not from_dt or to_res is not to_dt: # Do not test this case, it is handled in multiple steps, # each of which should is tested individually. 
return safe = (casting & ~Casting.cast_is_view) <= Casting.safe del from_res, to_res, casting arr1, arr2, values = self.get_data(from_dt, to_dt) cast._simple_strided_call((arr1, arr2)) # Check via python list assert arr2.tolist() == values # Check that the same results are achieved for strided loops arr1_o, arr2_o = self.get_data_variation(arr1, arr2, True, False) cast._simple_strided_call((arr1_o, arr2_o)) assert_array_equal(arr2_o, arr2) assert arr2_o.tobytes() == arr2.tobytes() # Check if alignment makes a difference, but only if supported # and only if the alignment can be wrong if ((from_dt.alignment == 1 and to_dt.alignment == 1) or not cast._supports_unaligned): return arr1_o, arr2_o = self.get_data_variation(arr1, arr2, False, True) cast._simple_strided_call((arr1_o, arr2_o)) assert_array_equal(arr2_o, arr2) assert arr2_o.tobytes() == arr2.tobytes() arr1_o, arr2_o = self.get_data_variation(arr1, arr2, False, False) cast._simple_strided_call((arr1_o, arr2_o)) assert_array_equal(arr2_o, arr2) assert arr2_o.tobytes() == arr2.tobytes() del arr1_o, arr2_o, cast @pytest.mark.parametrize("from_Dt", simple_dtypes) def test_numeric_to_times(self, from_Dt): # We currently only implement contiguous loops, so only need to # test those. from_dt = from_Dt() time_dtypes = [np.dtype("M8"), np.dtype("M8[ms]"), np.dtype("M8[4D]"), np.dtype("m8"), np.dtype("m8[ms]"), np.dtype("m8[4D]")] for time_dt in time_dtypes: cast = get_castingimpl(type(from_dt), type(time_dt)) casting, (from_res, to_res) = cast._resolve_descriptors( (from_dt, time_dt)) assert from_res is from_dt assert to_res is time_dt del from_res, to_res assert(casting & CAST_TABLE[from_Dt][type(time_dt)]) int64_dt = np.dtype(np.int64) arr1, arr2, values = self.get_data(from_dt, int64_dt) arr2 = arr2.view(time_dt) arr2[...] = np.datetime64("NaT") if time_dt == np.dtype("M8"): # This is a bit of a strange path, and could probably be removed arr1[-1] = 0 # ensure at least one value is not NaT # The cast currently succeeds, but the values are invalid: cast._simple_strided_call((arr1, arr2)) with pytest.raises(ValueError): str(arr2[-1]) # e.g. 
conversion to string fails
                return

            cast._simple_strided_call((arr1, arr2))
            assert [int(v) for v in arr2.tolist()] == values

            # Check that the same results are achieved for strided loops
            arr1_o, arr2_o = self.get_data_variation(arr1, arr2, True, False)
            cast._simple_strided_call((arr1_o, arr2_o))
            assert_array_equal(arr2_o, arr2)
            assert arr2_o.tobytes() == arr2.tobytes()

    @pytest.mark.parametrize(
            ["from_dt", "to_dt", "expected_casting", "nom", "denom"],
            [("M8[ns]", None, Casting.no | Casting.cast_is_view, 1, 1),
             (str(np.dtype("M8[ns]").newbyteorder()), None, Casting.equiv, 1, 1),
             ("M8", "M8[ms]", Casting.safe | Casting.cast_is_view, 1, 1),
             ("M8[ms]", "M8", Casting.unsafe, 1, 1),  # should be invalid cast
             ("M8[5ms]", "M8[5ms]", Casting.no | Casting.cast_is_view, 1, 1),
             ("M8[ns]", "M8[ms]", Casting.same_kind, 1, 10**6),
             ("M8[ms]", "M8[ns]", Casting.safe, 10**6, 1),
             ("M8[ms]", "M8[7ms]", Casting.same_kind, 1, 7),
             ("M8[4D]", "M8[1M]", Casting.same_kind, None,
              # give full values based on NumPy 1.19.x
              [-2**63, 0, -1, 1314, -1315, 564442610]),
             ("m8[ns]", None, Casting.no | Casting.cast_is_view, 1, 1),
             (str(np.dtype("m8[ns]").newbyteorder()), None, Casting.equiv, 1, 1),
             ("m8", "m8[ms]", Casting.safe | Casting.cast_is_view, 1, 1),
             ("m8[ms]", "m8", Casting.unsafe, 1, 1),  # should be invalid cast
             ("m8[5ms]", "m8[5ms]", Casting.no | Casting.cast_is_view, 1, 1),
             ("m8[ns]", "m8[ms]", Casting.same_kind, 1, 10**6),
             ("m8[ms]", "m8[ns]", Casting.safe, 10**6, 1),
             ("m8[ms]", "m8[7ms]", Casting.same_kind, 1, 7),
             ("m8[4D]", "m8[1M]", Casting.unsafe, None,
              # give full values based on NumPy 1.19.x
              [-2**63, 0, 0, 1314, -1315, 564442610])])
    def test_time_to_time(self, from_dt, to_dt, expected_casting, nom, denom):
        from_dt = np.dtype(from_dt)
        if to_dt is not None:
            to_dt = np.dtype(to_dt)

        # Test a few values for casting (results generated with NumPy 1.19)
        values = np.array([-2**63, 1, 2**63-1, 10000, -10000, 2**32])
        values = values.astype(np.dtype("int64").newbyteorder(from_dt.byteorder))
        assert values.dtype.byteorder == from_dt.byteorder
        assert np.isnat(values.view(from_dt)[0])

        DType = type(from_dt)
        cast = get_castingimpl(DType, DType)
        casting, (from_res, to_res) = cast._resolve_descriptors((from_dt, to_dt))
        assert from_res is from_dt
        assert to_res is to_dt or to_dt is None
        assert casting == expected_casting

        if nom is not None:
            expected_out = (values * nom // denom).view(to_res)
            expected_out[0] = "NaT"
        else:
            expected_out = np.empty_like(values)
            expected_out[...] = denom
            expected_out = expected_out.view(to_dt)

        orig_arr = values.view(from_dt)
        orig_out = np.empty_like(expected_out)

        if casting == Casting.unsafe and (to_dt == "m8" or to_dt == "M8"):
            # Casting from non-generic to generic units is an error and should
            # probably be reported as an invalid cast earlier.
            with pytest.raises(ValueError):
                cast._simple_strided_call((orig_arr, orig_out))
            return

        for aligned in [True, False]:
            for contig in [True, False]:
                arr, out = self.get_data_variation(
                    orig_arr, orig_out, aligned, contig)
                out[...] = 0
                cast._simple_strided_call((arr, out))
                assert_array_equal(out.view("int64"), expected_out.view("int64"))

    def string_with_modified_length(self, dtype, change_length):
        fact = 1 if dtype.char == "S" else 4
        length = dtype.itemsize // fact + change_length
        return np.dtype(f"{dtype.byteorder}{dtype.char}{length}")

    @pytest.mark.parametrize("other_DT", simple_dtypes)
    @pytest.mark.parametrize("string_char", ["S", "U"])
    def test_string_cancast(self, other_DT, string_char):
        fact = 1 if string_char == "S" else 4

        string_DT = type(np.dtype(string_char))
        cast = get_castingimpl(other_DT, string_DT)

        other_dt = other_DT()
        expected_length = get_expected_stringlength(other_dt)
        string_dt = np.dtype(f"{string_char}{expected_length}")

        safety, (res_other_dt, res_dt) = cast._resolve_descriptors((other_dt, None))
        assert res_dt.itemsize == expected_length * fact
        assert safety == Casting.safe  # we consider to-string casts "safe"
        assert isinstance(res_dt, string_DT)

        # These casts currently implement changing the string length, so
        # check the cast-safety for too long/fixed string lengths:
        for change_length in [-1, 0, 1]:
            if change_length >= 0:
                expected_safety = Casting.safe
            else:
                expected_safety = Casting.same_kind

            to_dt = self.string_with_modified_length(string_dt, change_length)
            safety, (_, res_dt) = cast._resolve_descriptors((other_dt, to_dt))
            assert res_dt is to_dt
            assert safety == expected_safety

        # The opposite direction is always considered unsafe:
        cast = get_castingimpl(string_DT, other_DT)

        safety, _ = cast._resolve_descriptors((string_dt, other_dt))
        assert safety == Casting.unsafe

        cast = get_castingimpl(string_DT, other_DT)
        safety, (_, res_dt) = cast._resolve_descriptors((string_dt, None))
        assert safety == Casting.unsafe
        assert other_dt is res_dt  # returns the singleton for simple dtypes

    @pytest.mark.parametrize("string_char", ["S", "U"])
    @pytest.mark.parametrize("other_dt", simple_dtype_instances())
    def test_simple_string_casts_roundtrip(self, other_dt, string_char):
        """
        Tests casts from and to string by checking the roundtripping property.

        The test also covers some string to string casts (but not all).

        If this test creates issues, it should possibly just be simplified
        or even removed (checking whether unaligned/non-contiguous casts give
        the same results is useful, though).
""" string_DT = type(np.dtype(string_char)) cast = get_castingimpl(type(other_dt), string_DT) cast_back = get_castingimpl(string_DT, type(other_dt)) _, (res_other_dt, string_dt) = cast._resolve_descriptors((other_dt, None)) if res_other_dt is not other_dt: # do not support non-native byteorder, skip test in that case assert other_dt.byteorder != res_other_dt.byteorder return orig_arr, values = self.get_data(other_dt, None) str_arr = np.zeros(len(orig_arr), dtype=string_dt) string_dt_short = self.string_with_modified_length(string_dt, -1) str_arr_short = np.zeros(len(orig_arr), dtype=string_dt_short) string_dt_long = self.string_with_modified_length(string_dt, 1) str_arr_long = np.zeros(len(orig_arr), dtype=string_dt_long) assert not cast._supports_unaligned # if support is added, should test assert not cast_back._supports_unaligned for contig in [True, False]: other_arr, str_arr = self.get_data_variation( orig_arr, str_arr, True, contig) _, str_arr_short = self.get_data_variation( orig_arr, str_arr_short.copy(), True, contig) _, str_arr_long = self.get_data_variation( orig_arr, str_arr_long, True, contig) cast._simple_strided_call((other_arr, str_arr)) cast._simple_strided_call((other_arr, str_arr_short)) assert_array_equal(str_arr.astype(string_dt_short), str_arr_short) cast._simple_strided_call((other_arr, str_arr_long)) assert_array_equal(str_arr, str_arr_long) if other_dt.kind == "b": # Booleans do not roundtrip continue other_arr[...] = 0 cast_back._simple_strided_call((str_arr, other_arr)) assert_array_equal(orig_arr, other_arr) other_arr[...] = 0 cast_back._simple_strided_call((str_arr_long, other_arr)) assert_array_equal(orig_arr, other_arr) @pytest.mark.parametrize("other_dt", ["S8", "<U8", ">U8"]) @pytest.mark.parametrize("string_char", ["S", "U"]) def test_string_to_string_cancast(self, other_dt, string_char): other_dt = np.dtype(other_dt) fact = 1 if string_char == "S" else 4 div = 1 if other_dt.char == "S" else 4 string_DT = type(np.dtype(string_char)) cast = get_castingimpl(type(other_dt), string_DT) expected_length = other_dt.itemsize // div string_dt = np.dtype(f"{string_char}{expected_length}") safety, (res_other_dt, res_dt) = cast._resolve_descriptors((other_dt, None)) assert res_dt.itemsize == expected_length * fact assert isinstance(res_dt, string_DT) if other_dt.char == string_char: if other_dt.isnative: expected_safety = Casting.no | Casting.cast_is_view else: expected_safety = Casting.equiv elif string_char == "U": expected_safety = Casting.safe else: expected_safety = Casting.unsafe assert expected_safety == safety for change_length in [-1, 0, 1]: to_dt = self.string_with_modified_length(string_dt, change_length) safety, (_, res_dt) = cast._resolve_descriptors((other_dt, to_dt)) assert res_dt is to_dt if expected_safety == Casting.unsafe: assert safety == expected_safety elif change_length < 0: assert safety == Casting.same_kind elif change_length == 0: assert safety == expected_safety elif change_length > 0: assert safety == Casting.safe @pytest.mark.parametrize("order1", [">", "<"]) @pytest.mark.parametrize("order2", [">", "<"]) def test_unicode_byteswapped_cast(self, order1, order2): # Very specific tests (not using the castingimpl directly) # that tests unicode bytedwaps including for unaligned array data. 
dtype1 = np.dtype(f"{order1}U30") dtype2 = np.dtype(f"{order2}U30") data1 = np.empty(30 * 4 + 1, dtype=np.uint8)[1:].view(dtype1) data2 = np.empty(30 * 4 + 1, dtype=np.uint8)[1:].view(dtype2) if dtype1.alignment != 1: # alignment should always be >1, but skip the check if not assert not data1.flags.aligned assert not data2.flags.aligned element = "this is a ünicode string‽" data1[()] = element # Test both `data1` and `data1.copy()` (which should be aligned) for data in [data1, data1.copy()]: data2[...] = data1 assert data2[()] == element assert data2.copy()[()] == element def test_void_to_string_special_case(self): # Cover a small special case in void to string casting that could # probably just as well be turned into an error (compare # `test_object_to_parametric_internal_error` below). assert np.array([], dtype="V5").astype("S").dtype.itemsize == 5 assert np.array([], dtype="V5").astype("U").dtype.itemsize == 4 * 5 def test_object_to_parametric_internal_error(self): # We reject casting from object to a parametric type, without # figuring out the correct instance first. object_dtype = type(np.dtype(object)) other_dtype = type(np.dtype(str)) cast = get_castingimpl(object_dtype, other_dtype) with pytest.raises(TypeError, match="casting from object to the parametric DType"): cast._resolve_descriptors((np.dtype("O"), None)) @pytest.mark.parametrize("dtype", simple_dtype_instances()) def test_object_and_simple_resolution(self, dtype): # Simple test to exercise the cast when no instance is specified object_dtype = type(np.dtype(object)) cast = get_castingimpl(object_dtype, type(dtype)) safety, (_, res_dt) = cast._resolve_descriptors((np.dtype("O"), dtype)) assert safety == Casting.unsafe assert res_dt is dtype safety, (_, res_dt) = cast._resolve_descriptors((np.dtype("O"), None)) assert safety == Casting.unsafe assert res_dt == dtype.newbyteorder("=") @pytest.mark.parametrize("dtype", simple_dtype_instances()) def test_simple_to_object_resolution(self, dtype): # Simple test to exercise the cast when no instance is specified object_dtype = type(np.dtype(object)) cast = get_castingimpl(type(dtype), object_dtype) safety, (_, res_dt) = cast._resolve_descriptors((dtype, None)) assert safety == Casting.safe assert res_dt is np.dtype("O") @pytest.mark.parametrize("casting", ["no", "unsafe"]) def test_void_and_structured_with_subarray(self, casting): # test case corresponding to gh-19325 dtype = np.dtype([("foo", "<f4", (3, 2))]) expected = casting == "unsafe" assert np.can_cast("V4", dtype, casting=casting) == expected assert np.can_cast(dtype, "V4", casting=casting) == expected @pytest.mark.parametrize("dtype", np.typecodes["All"]) def test_object_casts_NULL_None_equivalence(self, dtype): # None to <other> casts may succeed or fail, but a NULL'ed array must # behave the same as one filled with None's. arr_normal = np.array([None] * 5) arr_NULLs = np.empty_like([None] * 5) # If the check fails (maybe it should) the test would lose its purpose: assert arr_NULLs.tobytes() == b"\x00" * arr_NULLs.nbytes try: expected = arr_normal.astype(dtype) except TypeError: with pytest.raises(TypeError): arr_NULLs.astype(dtype), else: assert_array_equal(expected, arr_NULLs.astype(dtype)) def test_float_to_bool(self): # test case corresponding to gh-19514 # simple test for casting bool_ to float16 res = np.array([0, 3, -7], dtype=np.int8).view(bool) expected = [0, 1, 1] assert_array_equal(res, expected)
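# A small illustrative sketch (not part of the test suite) of how the Casting
# flags defined near the top of this file combine; the values follow directly
# from the enum, where cast_is_view == 1 << 16:
#
#   c = Casting.safe | Casting.cast_is_view
#   (c & ~Casting.cast_is_view) == Casting.safe  # -> True, strip the view bit
#   bool(c & Casting.cast_is_view)               # -> True, the cast is a view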
{ "content_hash": "2877ec12188e128e6c2a1f52f1b61170", "timestamp": "", "source": "github", "line_count": 707, "max_line_length": 84, "avg_line_length": 41.25176803394625, "alnum_prop": 0.546922681296074, "repo_name": "simongibbons/numpy", "id": "d41d6dcc0541e7d1e1ee7d6619771c5fbaf64f37", "size": "29168", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "numpy/core/tests/test_casting_unittests.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "5486229" }, { "name": "C++", "bytes": "95911" }, { "name": "Cython", "bytes": "147831" }, { "name": "D", "bytes": "19" }, { "name": "Dockerfile", "bytes": "5130" }, { "name": "Fortran", "bytes": "8505" }, { "name": "JavaScript", "bytes": "16928" }, { "name": "Makefile", "bytes": "1697" }, { "name": "Python", "bytes": "9920017" }, { "name": "Shell", "bytes": "13540" }, { "name": "Smarty", "bytes": "4071" }, { "name": "TeX", "bytes": "896" }, { "name": "sed", "bytes": "5741" } ], "symlink_target": "" }
""" Global settings used through clifford. """ # private names for internal use within clifford _eps = 1e-12 # float epsilon for float comparisons _pretty = True # pretty-print global _print_precision = 5 # pretty printing precision on floats # public accessor functions to get and set the settings def pretty(precision=None): """Makes ``repr(MultiVector)`` default to pretty-print. `precision` arg can be used to set the printed precision. Parameters ----------- precision : int number of sig figs to print past decimal Examples ---------- >>> pretty(5) """ global _pretty _pretty = True if precision is not None: print_precision(precision) def ugly(): """ Makes ``repr(MultiVector)`` default to eval-able representation. """ global _pretty _pretty = False def eps(newEps=None): """ Get/Set the epsilon for float comparisons. """ global _eps if newEps is not None: _eps = newEps return _eps def print_precision(newVal): """Set the epsilon for float comparisons. Parameters ----------- newVal : int number of sig figs to print (see builtin `round`) Examples ---------- >>> print_precision(5) """ global _print_precision _print_precision = newVal
{ "content_hash": "09a931cbeba7e1a1741215d9959d27c5", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 76, "avg_line_length": 19.98507462686567, "alnum_prop": 0.6131441374159821, "repo_name": "arsenovic/clifford", "id": "0196078d111ad745cf1af113a5dfdbe98f95437e", "size": "1339", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clifford/_settings.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "478130" }, { "name": "Shell", "bytes": "1513" } ], "symlink_target": "" }
import os from setuptools import setup, find_packages def read(*rnames): return open(os.path.join(os.path.dirname(__file__), *rnames)).read() version = '0.1.0' long_description = (read('../../readme.rst')) setup(name='nexiles.gateway.example', version=version, description="A example nexiles|gateway service", long_description=long_description, classifiers=[ 'Intended Audience :: Developers', 'Topic :: Software Development :: Libraries :: Python Modules', ], keywords='', author='Stefan Eletzhofer', author_email='se@nexiles.de', url='https://github.com/nexiles/nexiles.gateway.example', license='proprietary', packages=find_packages('.', exclude=['ez_setup']), package_dir={'': '.'}, package_data={"nexiles.gateway.example": ["templates/*"]}, namespace_packages=['nexiles', 'nexiles.gateway'], include_package_data=True, zip_safe=True, install_requires=['setuptools', # 'nexiles.tools>=1.5.0' ], )
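# Once this distribution is installed (e.g. `pip install -e .` from this
# directory), the service is importable through the declared namespace
# packages:
#
#   import nexiles.gateway.example
#
# The `namespace_packages` entry above is what allows several independently
# distributed nexiles.gateway.* plugins to share the `nexiles` and
# `nexiles.gateway` package prefixes.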
{ "content_hash": "ef6873e633ee22e5c8f9718fede87055", "timestamp": "", "source": "github", "line_count": 34, "max_line_length": 73, "avg_line_length": 31.941176470588236, "alnum_prop": 0.5994475138121547, "repo_name": "nexiles/nexiles.gateway.example", "id": "d31c2208079401a60d27bba931566d264fd52057", "size": "1111", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "src/nexiles.gateway.example/setup.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "JavaScript", "bytes": "45" }, { "name": "Python", "bytes": "8536" }, { "name": "Shell", "bytes": "387" } ], "symlink_target": "" }
from JumpScale import j
# import JSModel
import time
# from abc import ABCMeta, abstractmethod
import collections
import msgpack
import snappy
import re

# IMPORTANT: had to remove the class for manipulating a kvs object; a class
# was clearer, but has several issues
# - this needs to be converted to a functional C module which can be reused in
#   many languages, doing this in OO is much more complex
# - it's slower, python is not very efficient with objects
# - if it's too slow now we can go to a cython module quite easily because it
#   is just a method
# - suggest to use nimlang to create a C module


class KeyValueStoreBase:  # , metaclass=ABCMeta):
    '''KeyValueStoreBase defines a store interface.'''

    def __init__(self, namespace, name="", serializers=[], masterdb=None, cache=None, changelog=None):
        self.namespace = namespace
        self.name = name
        self.logger = j.logger.get('j.servers.kvs')
        self.serializers = serializers or list()
        self.unserializers = list(reversed(self.serializers))
        self.cache = cache
        self.changelog = changelog
        self.masterdb = masterdb
        self._schema = b""
        self.owner = j.application.owner
        self.inMem = False

    def __new__(cls, *args, **kwargs):
        '''
        Copies the doc strings (when available) from the base implementation
        '''
        attrs = iter(list(cls.__dict__.items()))
        for attrName, attr in attrs:
            if not attr.__doc__ and\
                    hasattr(KeyValueStoreBase, attrName) and\
                    not attrName.startswith('_')\
                    and isinstance(attr, collections.Callable):
                baseAttr = getattr(KeyValueStoreBase, attrName)
                attr.__doc__ = baseAttr.__doc__

        return object.__new__(cls)

    @property
    def schema(self):
        return j.data.hash.bin2hex(self._schema).decode()

    @schema.setter
    def schema(self, val):
        """
        any free text to define the schema used for this store category
        """
        if len(val) == 32:
            val = j.data.hash.hex2bin(val)
        elif len(val) != 16:
            raise j.exceptions.Input(message="schema needs to be 32 (hex) or 16 (binary) bytes",
                                     level=1, source="", tags="", msgpub="")
        if not j.data.types.bytes.check(val):
            raise j.exceptions.Input(message="schema needs to be in bytes",
                                     level=1, source="", tags="", msgpub="")
        self._schema = val

    @property
    def owner(self):
        """
        defines who owns this object; normally when you open a store you set
        the owner if not the default
        """
        return j.data.hash.bin2hex(self._owner).decode()

    @owner.setter
    def owner(self, val):
        if len(val) == 32:
            val = j.data.hash.hex2bin(val)
        elif len(val) != 16:
            raise j.exceptions.Input(message="owner needs to be 32 (hex) or 16 (binary) bytes",
                                     level=1, source="", tags="", msgpub="")
        if not j.data.types.bytes.check(val):
            raise j.exceptions.Input(message="owner needs to be in bytes",
                                     level=1, source="", tags="", msgpub="")
        self._owner = val

    def _encode(self, val, expire=0, acl={}):
        # data = $type + $owner + $schema + $expire + $lengthacllist +
        #        [acllist] + snappyencoded(val) + $crcOfAllPrevious
        #
        # the type byte is more like a version marker, kept so the encoding
        # stays backwards compatible; layout of the byte:
        #   bit 6 = schema present, bit 5 = expire present,
        #   bit 4 = value was msgpack-serialized,
        #   low 4 bits = format version (currently 0)
        ttype = 0

        if self._schema != b"":
            ttype += 0b1000000

        if expire != 0:
            expire = j.data.time.getTimeEpoch() + expire
            ttype += 0b0100000
            expireb = expire.to_bytes(4, byteorder='big', signed=False)
        else:
            expireb = b""

        # data should be binary; if not, serialize it with msgpack
        if not j.data.types.bytes.check(val):
            val = j.data.serializer.msgpack.dumps(val)
            ttype += 0b0010000

        typeb = ttype.to_bytes(1, byteorder='big', signed=False)

        aclb = j.servers.kvs._aclSerialze(acl)

        # TODO: *1 not sure this is ok, why str encode, what if its binary?
# val = str.encode(val) val = snappy.compress(val) serialized = typeb + self._owner + self._schema + expireb + aclb + val # checksum crc = j.data.hash.crc32_string(serialized) crc = crc.to_bytes(4, byteorder='big', signed=False) return serialized + crc def _decode(self, data): """ @return [val,owner="",schema="",expire=0,acl={}] """ crcint = j.data.hash.crc32_string(data[:-4]) crc = crcint.to_bytes(4, byteorder='big', signed=False) if not crc == data[-4:]: raise j.exceptions.Input(message="Invalid checksum (CRC), is this a valid object ?:%s" % data) # # parsing header # header = data[0] counter = 1 owner = j.data.hash.bin2hex(data[counter:counter + 16]).decode() counter += 16 if header & 0b1000000: # schema defined schema = j.data.hash.bin2hex(data[counter:counter + 16]) counter += 16 else: # no schema schema = "" if header & 0b0100000: # expire is set expire = int.from_bytes(data[counter:counter + 4], byteorder='big', signed=False) counter += 4 else: expire = 0 nrsecrets = int.from_bytes(data[counter:counter + 1], byteorder='big', signed=False) aclbin = data[counter:counter + 17 * nrsecrets + 1] counter += 17 * nrsecrets + 1 acl = j.servers.kvs._aclUnserialze(aclbin) val = data[counter:-4] val = snappy.decompress(val) if header & 0b0010000: val = j.data.serializer.msgpack.loads(val) return (val, owner, schema, expire, acl) def set(self, key, value=None, expire=None, acl={}, secret=""): """ @param secret, when not specified the owner will be used, allows to specify different secret than youw own owner key @param expire is seconds from now, when obj will expire if you want to set then needs to be an int>0 or 0 """ res = self.getraw(key, secret=secret, die=False, modecheck="w") if res != None: (valOld, owner, schemaOld, expireOld, aclOld) = res if schemaOld != self.schema: msg = "schema of this db instance should be same as what is found in db" raise j.exceptions.Input(message=msg, level=1) if expire == None: expire = expireOld if value == None: value = valOld acl.update(aclOld) else: if expire == None: expire = 0 if value == None: raise j.exceptions.Input(message="value needs to be set (not None), key:%s" % key, level=1, source="", tags="", msgpub="") value2 = self.serialize(value) data = self._encode(value2, expire, acl) # the expire is non-generic as it only translates to the redis core does not break at the moment. # any _set should take expire to allow compatability. self._set(key, data, expire) # if self.cache != None: # self.cache._set(key=key, category=category, value=value1) def exists(self, key, secret=""): try: res = self.get(key, secret=secret) except Exception as e: if "not allowed" in str(e): # exists but no access, should just return False # return False # raise j.exceptions.Input(message="Object '%s' does exist but I have not rights." % # key, level=1, source="", tags="", msgpub="") return True if "Cannot find" in str(e): return False raise e return res != None def get(self, key, secret=""): ''' Gets a key value pair from the store @param: key of the key value pair @type: String @return: value of the key value pair ''' return self.getraw(key, secret, die=True)[0] def getraw(self, key, secret="", die=False, modecheck="r"): ''' Gets a key value pair from the store @param: key of the key value pair @type: String @param: modecheck is r,w or d, normally get always needs to check on r but can overrule this, can be more than 1 e.g. 
rw @return: (val, owner, schema, expire, acl) ''' # if self.cache != None: # res = self.cache._get(key=key, category=category) # get raw data # if res != None: # return self.unserialize(self._decode(res)) data = self._get(key) if data is None: if die: raise j.exceptions.Input(message="Cannot find object: %s" % key, level=1, source="", tags="", msgpub="") else: return None if secret == "": secret = self.owner (val, owner, schema, expire, acl) = self._decode(data) if j.servers.kvs._aclCheck(acl, owner, secret, modecheck) is False: raise j.exceptions.Input(message="cannot get obj with key '%s' because mode '%s' is not allowed." % ( key, modecheck), level=1, source="", tags="", msgpub="") val = self.unserialize(val) return (val, owner, schema, expire, acl) def delete(self, key, secret=""): val, owner, schema, expire, acl = self.getraw(key, secret=secret, modecheck='d', die=True) if secret is not None and secret != '' and owner != secret: raise j.exceptions.Input(message="Cannot delete object, only owner can delete an object", level=1, source="", tags="", msgpub="") self._delete(key=key) def serialize(self, value): for serializer in self.serializers: value = serializer.dumps(value) return value def unserialize(self, value): for serializer in self.unserializers: if value is not None: value = serializer.loads(value) return value def destroy(self): raise NotImplemented() def index(self, items, secret=""): """ @param items is {indexitem:key} indexitem is e.g. $actorname:$state:$role (is a text which will be index to key) indexitems are always made lowercase key links to the object in the db ':' is not allowed in indexitem """ if secret == "": secret = self.owner indexobj = self.getraw("index", die=False, secret=secret) if indexobj == None: ddict = {} else: ddict = msgpack.loads(indexobj) ddict.update(items) data2 = msgpack.dumps(ddict) self.set("index", data2, secret=secret) def index_remove(self, keys, secret=""): self.delete("index") def list(self, regex=".*", returnIndex=False, secret=""): """ regex is regex on the index, will return matched keys e.g. .*:new:.* would match e.g. 
all obj with state new """ indexobj = self.getraw("index", die=False, secret=secret) if indexobj == None: ddict = {} else: ddict = msgpack.loads(indexobj) res = set() for item, key in ddict.items(): item = item.decode() key = key.decode() if re.match(regex, item) is not None: if returnIndex is False: for key2 in key.split(","): res.add(key2) else: for key2 in key.split(","): res.add((item, key2)) return list(res) # DO NOT LOOK AT BELOW RIGHT NOW IS FOR FUTURE # def checkChangeLog(self): # pass # # def cacheSet(self, key, value, expirationInSecondsFromNow=120): # ttime = j.data.time.getTimeEpoch() # value = [ttime + expirationInSecondsFromNow, value] # if key == "": # key = j.data.idgenerator.generateGUID() # self.set(category="cache", key=key, value=value) # return key # # def cacheGet(self, key, deleteAfterGet=False): # r = self.get("cache", key) # if deleteAfterGet: # self.delete("cache", key) # return r[1] # # def cacheDelete(self, key): # self.delete("cache", key) # # def cacheExists(self, key): # return self.exists("cache", key) # # def cacheList(self): # # if "cache" in self.listCategories(): # return self.list("cache") # else: # return [] # # def cacheExpire(self): # now = j.data.time.getTimeEpoch() # for key in self.list(): # expiretime, val = self.get(key) # if expiretime > now: # self.delete("cache", key) # # @abstractmethod # def exists(self, category, key): # ''' # Checks if a key value pair exists in the store. # # @param: category of the key value pair # @type: String # # @param: key of the key value pair # @type: String # # @return: flag that states if the key value pair exists or not # @rtype: Boolean # ''' # # @abstractmethod # def list(self, category, prefix): # ''' # Lists the keys matching `prefix` in `category`. # # @param category: category the keys should be in # @type category: String # @param prefix: prefix the keys should start with # @type prefix: String # @return: keys that match `prefix` in `category`. # @rtype: List(String) # ''' # raise j.exceptions.NotImplemented("list is only supported on selected db's") # # @abstractmethod # def listCategories(self): # ''' # Lists the categories in this db. # # @return: categories in this db # @rtype: List(String) # ''' # # @abstractmethod # def _categoryExists(self, category): # ''' # Checks if a category exists # # @param category: category to check # @type category: String # @return: True if the category exists, False otherwise # @rtype: Boolean # ''' # # def lock(self, locktype, info="", timeout=5, timeoutwait=0, force=False): # """ # if locked will wait for time specified # @param locktype of lock is in style machine.disk.import (dot notation) # @param timeout is the time we want our lock to last # @param timeoutwait wait till lock becomes free # @param info is info which will be kept in lock, can be handy to e.g. 
mention why lock taken # @param force, if force will erase lock when timeout is reached # @return None # """ # category = "lock" # lockfree = self._lockWait(locktype, timeoutwait) # if not lockfree: # if force == False: # raise j.exceptions.RuntimeError("Cannot lock %s %s" % (locktype, info)) # value = [self.id, j.data.time.getTimeEpoch() + timeout, info] # encodedValue = j.data.serializer.json.dumps(value) # self.settest(category, locktype, encodedValue) # # def lockCheck(self, locktype): # """ # @param locktype of lock is in style machine.disk.import (dot notation) # @return result,id,lockEnd,info (lockEnd is time when lock times out, info is descr of lock, id is who locked) # result is False when free, True when lock is active # """ # if self.exists("lock", locktype): # encodedValue = self.get("lock", locktype) # try: # id, lockEnd, info = j.data.serializer.json.loads(encodedValue) # except ValueError: # self.logger.error("Failed to decode lock value") # raise ValueError("Invalid lock type %s" % locktype) # # if j.data.time.getTimeEpoch() > lockEnd: # self.delete("lock", locktype) # return False, 0, 0, "" # value = [True, id, lockEnd, info] # return value # else: # return False, 0, 0, "" # # def _lockWait(self, locktype, timeoutwait=0): # """ # wait till lock free # @return True when free, False when unable to free # """ # locked, id, lockEnd, info = self.lockCheck(locktype) # if locked: # start = j.data.time.getTimeEpoch() # if lockEnd + timeoutwait < start: # # the lock was already timed out so is free # return True # # while True: # now = j.data.time.getTimeEpoch() # if now > start + timeoutwait: # return False # if now > lockEnd: # return True # time.sleep(0.1) # return True # # def unlock(self, locktype, timeoutwait=0, force=False): # """ # @param locktype of lock is in style machine.disk.import (dot notation) # """ # lockfree = self._lockWait(locktype, timeoutwait) # if not lockfree: # if force == False: # raise j.exceptions.RuntimeError("Cannot unlock %s" % locktype) # self.delete("lock", locktype) # # def incrementReset(self, incrementtype, newint=0): # """ # @param incrementtype : type of increment is in style machine.disk.nrdisk (dot notation) # """ # self.set("increment", incrementtype, str(newint)) # # def increment(self, incrementtype): # """ # @param incrementtype : type of increment is in style machine.disk.nrdisk (dot notation) # """ # if not self.exists("increment", incrementtype): # self.set("increment", incrementtype, "1") # incr = 1 # else: # rawOldIncr = self.get("increment", incrementtype) # if not rawOldIncr.isdigit(): # raise ValueError("Increment type %s does not have a digit value: %s" % (incrementtype, rawOldIncr)) # oldIncr = int(rawOldIncr) # incr = oldIncr + 1 # self.set("increment", incrementtype, str(incr)) # return incr # # def getNrRecords(self, incrementtype): # if not self.exists("increment", incrementtype): # self.set("increment", incrementtype, "1") # incr = 1 # return int(self.get("increment", incrementtype)) # # def settest(self, category, key, value): # """ # if well stored return True # """ # self.set(category, key, value) # if self.get(category, key) == value: # return True # return False # # def _assertValidCategory(self, category): # if not isinstance(category, str) or not category: # raise ValueError('Invalid category, non empty string expected') # # def _assertValidKey(self, key): # if not isinstance(key, str) or not key: # raise ValueError('Invalid key, non empty string expected') # # def _assertExists(self, category, key): # if not 
self.exists(category, key): # errorMessage = 'Key value store doesnt have a value for key '\ # '"%s" in category "%s"' % (key, category) # self.logger.error(errorMessage) # raise KeyError(errorMessage) # # def _assertCategoryExists(self, category): # if not self._categoryExists(category): # errorMessage = 'Key value store doesn\'t have a category %s' % (category) # self.logger.error(errorMessage) # raise KeyError(errorMessage) # # def now(self): # """ # return current time # """ # return j.data.time.getTimeEpoch() # # def getModifySet(self, category, key, modfunction, **kwargs): # """ # get value # give as parameter to modfunction # try to set by means of testset, if not succesfull try again, will use random function to maximize chance to set # @param kwargs are other parameters as required (see usage in subscriber function) # """ # counter = 0 # while counter < 30: # data = self.get(category, key) # data2 = modfunction(data) # if self.settest(category, key, data2): # break # go out of loop, could store well # time.time.sleep(float(j.data.idgenerator.generateRandomInt(1, 10)) / 50) # counter += 1 # return data2 # # def subscribe(self, subscriberid, category, startid=0): # """ # each subscriber is identified by a key # in db there is a dict stored on key for category = category of this method # value= dict with as keys the subscribers # {"kristof":[lastaccessedTime,lastId],"pol":...} # # """ # if not self.exists("subscribers", category): # data = {subscriberid: [self.now(), startid]} # else: # if startid != 0: # if not self.exists(category, startid): # raise j.exceptions.RuntimeError( # "Cannot find %s:%s in db, cannot subscribe, select valid startid" % (category, startid)) # # def modfunction(data, subscriberid, db, startid): # data[subscriberid] = [db.now(), startid] # return data # # self.getModifySet("subscribers", category, modfunction, # subscriberid=subscriberid, db=self, startid=startid) # # def subscriptionGetNextItem(self, subscriberid, category, autoConfirm=True): # """ # get next item from subscription # returns # False,None when no next # True,the data when a next # """ # if not self.exists("subscribers", category): # raise j.exceptions.RuntimeError("cannot find subscription") # data = self.get("subscribers", category) # if subscriberid not in data: # raise j.exceptions.RuntimeError("cannot find subscriber") # lastaccesstime, lastid = data[subscriberid] # lastid += 1 # if not self.exists(category, startid): # return False, None # else: # return True, self.get(category, startid) # if autoConfirm: # self.subscriptionAdvance(subscriberid, category, lastid) # return self.get(category, key) # # def subscriptionAdvance(self, subscriberid, category, lastProcessedId): # # def modfunction(data, subscriberid, db, lastProcessedId): # data[subscriberid] = [db.now(), lastProcessedId] # return data # # self.getModifySet("subscribers", category, modfunction, subscriberid=subscriberid, # db=self, lastProcessedId=lastProcessedId) # # def setDedupe(self, category, data): # """ # will return unique key which references the data, if it exists or not # """ # if data == "" or data == None: # return "" # if len(data) < 32: # return data # md5 = j.data.hash.md5_string(data) # if not self.exists(category, md5): # self.set(category, md5, data) # return md5 # # def getDedupe(self, category, key): # if len(key) < 32: # return key.encode() # return self.get(category, key)
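
# --- Usage sketch (editor's addition; not part of the original module) ---
# A minimal sketch of how a concrete subclass of KeyValueStoreBase is meant to
# be used, assuming an in-memory backend that provides the _set/_get/_delete
# hooks the base class calls. `MemStore` is a hypothetical name introduced
# only for illustration; it is not part of JumpScale.


class MemStore(KeyValueStoreBase):

    def __init__(self, namespace):
        KeyValueStoreBase.__init__(self, namespace)
        self._db = {}

    def _set(self, key, data, expire=0):
        # data arrives already encoded (type byte, owner, acl, snappy, crc);
        # expire handling is backend-specific and ignored here
        self._db[key] = data

    def _get(self, key):
        return self._db.get(key)

    def _delete(self, key):
        self._db.pop(key, None)


# Usage (requires a JumpScale runtime for j.application.owner etc.):
# store = MemStore("test")
# store.set("mykey", b"hello", expire=60)     # encoded, compressed, crc'd
# assert store.get("mykey") == b"hello"       # decoded and acl-checked as owner
# store.index({"actor:new:admin": "mykey"})   # maintain a searchable index
# assert store.list(regex=".*:new:.*") == ["mykey"]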
{ "content_hash": "2228445d1d4329ba0251a546287ac174", "timestamp": "", "source": "github", "line_count": 686, "max_line_length": 132, "avg_line_length": 35.36880466472303, "alnum_prop": 0.5494786300127766, "repo_name": "Jumpscale/jumpscale_core8", "id": "9b85c0979d558536dfda5e1e960544599962c414", "size": "24263", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lib/JumpScale/servers/key_value_store/store.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "1113" }, { "name": "Cap'n Proto", "bytes": "9033" }, { "name": "Lua", "bytes": "12538" }, { "name": "Python", "bytes": "4343122" }, { "name": "Shell", "bytes": "7091" } ], "symlink_target": "" }
import MySQLdb


class Connection(object):
    """Factory for MySQL connections to the `graduation` database."""

    def get_connection(self):
        return MySQLdb.connect(host="localhost", user="root", passwd="123", db="graduation",
                               use_unicode=True, charset="utf8")
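
# --- Usage sketch (editor's addition; illustrative only) ---
# Typical use of the helper above; the `student` table is a hypothetical
# placeholder, the point is the cursor workflow and cleanup.
if __name__ == "__main__":
    conn = Connection().get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM student")  # hypothetical table
        print(cursor.fetchone()[0])
        cursor.close()
    finally:
        conn.close()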
{ "content_hash": "eff5760ecdb222dda9ba9dfe2ef9d91d", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 92, "avg_line_length": 26.3, "alnum_prop": 0.5703422053231939, "repo_name": "zhangsh950618/graduation", "id": "c12840e962c5ef17c724832e3fcb5133250c0623", "size": "288", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "util/connection.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "66948" } ], "symlink_target": "" }
from django.db import migrations class Migration(migrations.Migration): dependencies = [ ("zerver", "0264_migrate_is_announcement_only"), ] operations = [ migrations.RemoveField( model_name="stream", name="is_announcement_only", ), ]
{ "content_hash": "5b65636d4e20758fc8ee975bbe527433", "timestamp": "", "source": "github", "line_count": 15, "max_line_length": 56, "avg_line_length": 20.133333333333333, "alnum_prop": 0.5827814569536424, "repo_name": "punchagan/zulip", "id": "1eec63286bb469644c1c4a1989b822686e308b44", "size": "353", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "zerver/migrations/0265_remove_stream_is_announcement_only.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "453615" }, { "name": "Dockerfile", "bytes": "4898" }, { "name": "Emacs Lisp", "bytes": "157" }, { "name": "HTML", "bytes": "607321" }, { "name": "Handlebars", "bytes": "315160" }, { "name": "JavaScript", "bytes": "3572990" }, { "name": "Perl", "bytes": "9884" }, { "name": "Puppet", "bytes": "94991" }, { "name": "Python", "bytes": "8750579" }, { "name": "Ruby", "bytes": "3875" }, { "name": "Shell", "bytes": "134468" }, { "name": "TypeScript", "bytes": "223296" } ], "symlink_target": "" }
"""Helper methods for running Tensorboard from the client.""" from typing import Any, Mapping, Optional from xmanager import xm class TensorboardProvider: """A class to generate package and job/args to Tensorboard jobs.""" DEFAULT_TENSORBOARD_PORT = 6006 @staticmethod def get_tensorboard_packageable(timeout_secs: int) -> xm.PythonContainer: """Creates container spec running TensorBoard server. Args: timeout_secs: Seconds to run the server for. Note that a value of 0 disables the associated timeout. Raises: RuntimeError: `timeout_secs` is negative. Returns: Spec of container running TensorBoard server for a specified period of time. """ if timeout_secs < 0: raise RuntimeError('`timeout_secs` must be a nonnegative number') return xm.PythonContainer( base_image='tensorflow/tensorflow', entrypoint=xm.CommandList([f'timeout {timeout_secs}s tensorboard'])) @staticmethod def get_tensorboard_job_args( log_dir: str, port: Optional[int] = None, additional_args: Optional[Mapping[str, Any]] = None, ) -> Mapping[str, Any]: """Get arguments to start a Tensorboard job.""" args = { 'logdir': log_dir, 'port': port or TensorboardProvider.DEFAULT_TENSORBOARD_PORT, # Allows accessing visualisations from Docker container running locally. 'host': '0.0.0.0', # This is set to true by default when running Tensorboard. # Since it doesn't seem to be working well with GKE Workload Identity, # we set it to false for now. Can be overriden through the # `additional_args` param. # # https://github.com/tensorflow/tensorboard/issues/4784#issuecomment-868945650 'load_fast': 'false' } if additional_args: args.update(additional_args) return args def add_tensorboard( experiment: xm.Experiment, logdir: str, executor: xm.Executor, timeout_secs: int = 60 * 60 * 24, args: Optional[Mapping[str, Any]] = None, ) -> None: """Self-contained function which adds a Tensorboard auxiliary job to @experiment.""" provider = TensorboardProvider [executable] = experiment.package([ xm.Packageable( provider.get_tensorboard_packageable(timeout_secs=timeout_secs), executor.Spec()) ]) job = xm.Job( executable, executor, args=provider.get_tensorboard_job_args(logdir, additional_args=args), name='tensorboard') # TODO: Add support for `termination_delay_secs`. experiment.add(xm.AuxiliaryUnitJob(job, termination_delay_secs=0))
{ "content_hash": "0cd9f2fb38652cecb39460bdb26acc0c", "timestamp": "", "source": "github", "line_count": 82, "max_line_length": 86, "avg_line_length": 32.08536585365854, "alnum_prop": 0.6731280881793995, "repo_name": "deepmind/xmanager", "id": "973de15db1433e0d06bd57abb871dfe972827886", "size": "3225", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "xmanager/contrib/tensorboard.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Jupyter Notebook", "bytes": "42109" }, { "name": "Mako", "bytes": "1252" }, { "name": "Python", "bytes": "722416" }, { "name": "Shell", "bytes": "13219" } ], "symlink_target": "" }
from nose import tools
from datetime import date
import model_helpers
from django.conf import settings

# Specify a filename template that makes use of all capabilities of the upload_to template
settings.UPLOAD_TO_OPTIONS = {"file_name_template": "{model_name}/%Y/{filename}-{instance.pk}.{extension}"}


class FakeModel(object):
    pk = 1


def test_upload_to():
    fake_instance = FakeModel()
    # get an upload_to callable that truncates file names to 10 characters
    upload_to = model_helpers.UploadTo(max_filename_length=10)
    year = date.today().year
    tools.assert_equal(
        upload_to(fake_instance, "/tmp/filezx/myfile.png"),
        "FakeModel/%d/myfile-1.png" % year)
    tools.assert_equal(
        upload_to(fake_instance, "/tmp/filezx/1234567890123456.png"),
        "FakeModel/%d/1234567890-1.png" % year)
    tools.assert_raises(ValueError, upload_to, fake_instance, "/tmp/filezx/1234567890123456.php")
    tools.assert_raises(ValueError, upload_to, fake_instance, "/tmp/filezx/1234567890123456.pHp")
    tools.assert_raises(ValueError, upload_to, fake_instance, "/tmp/filezx/.pHp")
    # Validate model_helper's upload_to function (shortcut for using the UploadTo class)
    tools.assert_equal(
        model_helpers.upload_to(fake_instance, "/tmp/filezx/myfile.png"),
        "FakeModel/%d/myfile-1.png" % year)
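
# --- Usage sketch (editor's addition; illustrative only) ---
# How the callable under test is typically attached to a Django model; the
# `Document` model is a hypothetical example. `upload_to(instance, filename)`
# matches Django's upload_to callable contract, as exercised above.
#
# from django.db import models
#
# class Document(models.Model):
#     attachment = models.FileField(upload_to=model_helpers.upload_to)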
{ "content_hash": "cf2e93bbd633efaa535e5dfdb411370f", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 108, "avg_line_length": 40.84375, "alnum_prop": 0.7084927314460597, "repo_name": "rewardz/django_model_helpers", "id": "9a59af56ad2efe41de5d834fd157ace67c944897", "size": "1307", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/test_upload_to.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "31162" } ], "symlink_target": "" }
from jsonrpc import ServiceProxy access = ServiceProxy("http://127.0.0.1:55903") pwd = raw_input("Enter wallet passphrase: ") access.walletpassphrase(pwd, 60)
{ "content_hash": "c502e57edb9e898c24f9ae182f3ad300", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 47, "avg_line_length": 39.5, "alnum_prop": 0.7658227848101266, "repo_name": "grumpycoin/grumpycoin-v.1.2", "id": "8cf711cdd585efbca62c2174c87613c21fd08b21", "size": "158", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "contrib/wallettools/walletunlock.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "78622" }, { "name": "C++", "bytes": "1375855" }, { "name": "IDL", "bytes": "11702" }, { "name": "Objective-C", "bytes": "2463" }, { "name": "Python", "bytes": "36361" }, { "name": "Shell", "bytes": "17733" }, { "name": "TypeScript", "bytes": "3810608" } ], "symlink_target": "" }
import mock from oslo_utils import timeutils from oslo_versionedobjects import exception as ovo_exc from nova.compute import manager as compute_manager from nova import db from nova import exception from nova import objects from nova.objects import aggregate from nova.objects import service from nova import test from nova.tests.unit.objects import test_compute_node from nova.tests.unit.objects import test_objects NOW = timeutils.utcnow().replace(microsecond=0) fake_service = { 'created_at': NOW, 'updated_at': None, 'deleted_at': None, 'deleted': False, 'id': 123, 'host': 'fake-host', 'binary': 'fake-service', 'topic': 'fake-service-topic', 'report_count': 1, 'forced_down': False, 'disabled': False, 'disabled_reason': None, 'last_seen_up': None, 'version': service.SERVICE_VERSION, } OPTIONAL = ['availability_zone', 'compute_node'] class _TestServiceObject(object): def supported_hv_specs_comparator(self, expected, obj_val): obj_val = [inst.to_list() for inst in obj_val] self.assertJsonEqual(expected, obj_val) def pci_device_pools_comparator(self, expected, obj_val): obj_val = obj_val.obj_to_primitive() self.assertJsonEqual(expected, obj_val) def comparators(self): return {'stats': self.assertJsonEqual, 'host_ip': self.assertJsonEqual, 'supported_hv_specs': self.supported_hv_specs_comparator, 'pci_device_pools': self.pci_device_pools_comparator} def subs(self): return {'supported_hv_specs': 'supported_instances', 'pci_device_pools': 'pci_stats'} def _test_query(self, db_method, obj_method, *args, **kwargs): self.mox.StubOutWithMock(db, db_method) db_exception = kwargs.pop('db_exception', None) if db_exception: getattr(db, db_method)(self.context, *args, **kwargs).AndRaise( db_exception) else: getattr(db, db_method)(self.context, *args, **kwargs).AndReturn( fake_service) self.mox.ReplayAll() obj = getattr(service.Service, obj_method)(self.context, *args, **kwargs) if db_exception: self.assertIsNone(obj) else: self.compare_obj(obj, fake_service, allow_missing=OPTIONAL) def test_get_by_id(self): self._test_query('service_get', 'get_by_id', 123) def test_get_by_host_and_topic(self): self._test_query('service_get_by_host_and_topic', 'get_by_host_and_topic', 'fake-host', 'fake-topic') def test_get_by_host_and_binary(self): self._test_query('service_get_by_host_and_binary', 'get_by_host_and_binary', 'fake-host', 'fake-binary') def test_get_by_host_and_binary_raises(self): self._test_query('service_get_by_host_and_binary', 'get_by_host_and_binary', 'fake-host', 'fake-binary', db_exception=exception.HostBinaryNotFound( host='fake-host', binary='fake-binary')) def test_get_by_compute_host(self): self._test_query('service_get_by_compute_host', 'get_by_compute_host', 'fake-host') def test_get_by_args(self): self._test_query('service_get_by_host_and_binary', 'get_by_args', 'fake-host', 'fake-binary') def test_create(self): self.mox.StubOutWithMock(db, 'service_create') db.service_create(self.context, {'host': 'fake-host', 'version': fake_service['version']} ).AndReturn(fake_service) self.mox.ReplayAll() service_obj = service.Service(context=self.context) service_obj.host = 'fake-host' service_obj.create() self.assertEqual(fake_service['id'], service_obj.id) self.assertEqual(service.SERVICE_VERSION, service_obj.version) def test_recreate_fails(self): self.mox.StubOutWithMock(db, 'service_create') db.service_create(self.context, {'host': 'fake-host', 'version': fake_service['version']} ).AndReturn(fake_service) self.mox.ReplayAll() service_obj = service.Service(context=self.context) service_obj.host = 
'fake-host' service_obj.create() self.assertRaises(exception.ObjectActionError, service_obj.create) def test_save(self): self.mox.StubOutWithMock(db, 'service_update') db.service_update(self.context, 123, {'host': 'fake-host', 'version': fake_service['version']} ).AndReturn(fake_service) self.mox.ReplayAll() service_obj = service.Service(context=self.context) service_obj.id = 123 service_obj.host = 'fake-host' service_obj.save() self.assertEqual(service.SERVICE_VERSION, service_obj.version) @mock.patch.object(db, 'service_create', return_value=fake_service) def test_set_id_failure(self, db_mock): service_obj = service.Service(context=self.context, binary='compute') service_obj.create() self.assertRaises(ovo_exc.ReadOnlyFieldError, setattr, service_obj, 'id', 124) def _test_destroy(self): self.mox.StubOutWithMock(db, 'service_destroy') db.service_destroy(self.context, 123) self.mox.ReplayAll() service_obj = service.Service(context=self.context) service_obj.id = 123 service_obj.destroy() def test_destroy(self): # The test harness needs db.service_destroy to work, # so avoid leaving it broken here after we're done orig_service_destroy = db.service_destroy try: self._test_destroy() finally: db.service_destroy = orig_service_destroy def test_get_by_topic(self): self.mox.StubOutWithMock(db, 'service_get_all_by_topic') db.service_get_all_by_topic(self.context, 'fake-topic').AndReturn( [fake_service]) self.mox.ReplayAll() services = service.ServiceList.get_by_topic(self.context, 'fake-topic') self.assertEqual(1, len(services)) self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL) @mock.patch('nova.db.service_get_all_by_binary') def test_get_by_binary(self, mock_get): mock_get.return_value = [fake_service] services = service.ServiceList.get_by_binary(self.context, 'fake-binary') self.assertEqual(1, len(services)) mock_get.assert_called_once_with(self.context, 'fake-binary') def test_get_by_host(self): self.mox.StubOutWithMock(db, 'service_get_all_by_host') db.service_get_all_by_host(self.context, 'fake-host').AndReturn( [fake_service]) self.mox.ReplayAll() services = service.ServiceList.get_by_host(self.context, 'fake-host') self.assertEqual(1, len(services)) self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL) def test_get_all(self): self.mox.StubOutWithMock(db, 'service_get_all') db.service_get_all(self.context, disabled=False).AndReturn( [fake_service]) self.mox.ReplayAll() services = service.ServiceList.get_all(self.context, disabled=False) self.assertEqual(1, len(services)) self.compare_obj(services[0], fake_service, allow_missing=OPTIONAL) def test_get_all_with_az(self): self.mox.StubOutWithMock(db, 'service_get_all') self.mox.StubOutWithMock(aggregate.AggregateList, 'get_by_metadata_key') db.service_get_all(self.context, disabled=None).AndReturn( [dict(fake_service, topic='compute')]) agg = aggregate.Aggregate(context=self.context) agg.name = 'foo' agg.metadata = {'availability_zone': 'test-az'} agg.create() agg.hosts = [fake_service['host']] aggregate.AggregateList.get_by_metadata_key(self.context, 'availability_zone', hosts=set(agg.hosts)).AndReturn([agg]) self.mox.ReplayAll() services = service.ServiceList.get_all(self.context, set_zones=True) self.assertEqual(1, len(services)) self.assertEqual('test-az', services[0].availability_zone) def test_compute_node(self): fake_compute_node = objects.ComputeNode._from_db_object( self.context, objects.ComputeNode(), test_compute_node.fake_compute_node) self.mox.StubOutWithMock(objects.ComputeNodeList, 'get_all_by_host') 
objects.ComputeNodeList.get_all_by_host( self.context, 'fake-host').AndReturn( [fake_compute_node]) self.mox.ReplayAll() service_obj = service.Service(id=123, host="fake-host", binary="nova-compute") service_obj._context = self.context self.assertEqual(service_obj.compute_node, fake_compute_node) # Make sure it doesn't re-fetch this service_obj.compute_node def test_load_when_orphaned(self): service_obj = service.Service() service_obj.id = 123 self.assertRaises(exception.OrphanedObjectError, getattr, service_obj, 'compute_node') @mock.patch.object(objects.ComputeNodeList, 'get_all_by_host') def test_obj_make_compatible_for_compute_node(self, get_all_by_host): service_obj = objects.Service(context=self.context) fake_service_dict = fake_service.copy() fake_compute_obj = objects.ComputeNode(host=fake_service['host'], service_id=fake_service['id']) get_all_by_host.return_value = [fake_compute_obj] service_obj.obj_make_compatible(fake_service_dict, '1.9') self.assertEqual( fake_compute_obj.obj_to_primitive(target_version='1.10'), fake_service_dict['compute_node']) @mock.patch.object(objects.ComputeNodeList, 'get_all_by_host') def test_obj_make_compatible_with_juno_computes(self, get_all_by_host): service_attrs = dict(fake_service) del service_attrs['version'] service_obj = objects.Service( context=self.context, **service_attrs) service_obj.binary = 'nova-compute' fake_service_dict = fake_service.copy() fake_service_dict['binary'] = 'nova-compute' fake_compute_obj = objects.ComputeNode(host=fake_service['host'], service_id=fake_service['id']) get_all_by_host.return_value = [fake_compute_obj] # Juno versions : # Service : 1.4 # ComputeNode : 1.5 service_obj.obj_make_compatible(fake_service_dict, '1.4') self.assertEqual( '1.5', fake_service_dict['compute_node']['nova_object.version']) @mock.patch('nova.db.service_get_minimum_version') def test_get_minimum_version_none(self, mock_get): mock_get.return_value = None self.assertEqual(0, objects.Service.get_minimum_version(self.context, 'compute')) mock_get.assert_called_once_with(self.context, 'compute', use_slave=False) @mock.patch('nova.db.service_get_minimum_version') def test_get_minimum_version(self, mock_get): mock_get.return_value = 123 self.assertEqual(123, objects.Service.get_minimum_version(self.context, 'compute')) mock_get.assert_called_once_with(self.context, 'compute', use_slave=False) @mock.patch('nova.db.service_get_minimum_version', return_value=2) def test_create_above_minimum(self, mock_get): with mock.patch('nova.objects.service.SERVICE_VERSION', new=3): objects.Service(context=self.context, binary='compute').create() @mock.patch('nova.db.service_get_minimum_version', return_value=2) def test_create_equal_to_minimum(self, mock_get): with mock.patch('nova.objects.service.SERVICE_VERSION', new=2): objects.Service(context=self.context, binary='compute').create() @mock.patch('nova.db.service_get_minimum_version', return_value=2) def test_create_below_minimum(self, mock_get): with mock.patch('nova.objects.service.SERVICE_VERSION', new=1): self.assertRaises(exception.ServiceTooOld, objects.Service(context=self.context, binary='compute', ).create) class TestServiceObject(test_objects._LocalTest, _TestServiceObject): pass class TestRemoteServiceObject(test_objects._RemoteTest, _TestServiceObject): pass class TestServiceVersion(test.TestCase): def _collect_things(self): data = { 'compute_rpc': compute_manager.ComputeManager.target.version, } return data def test_version(self): calculated = self._collect_things() self.assertEqual( 
len(service.SERVICE_VERSION_HISTORY), service.SERVICE_VERSION + 1, 'Service version %i has no history. Please update ' 'nova.objects.service.SERVICE_VERSION_HISTORY ' 'and add %s to it' % (service.SERVICE_VERSION, repr(calculated))) current = service.SERVICE_VERSION_HISTORY[service.SERVICE_VERSION] self.assertEqual( current, calculated, 'Changes detected that require a SERVICE_VERSION change. Please ' 'increment nova.objects.service.SERVICE_VERSION') def test_version_in_init(self): self.assertRaises(exception.ObjectActionError, objects.Service, version=123) def test_version_set_on_init(self): self.assertEqual(service.SERVICE_VERSION, objects.Service().version) def test_version_loaded_from_db(self): fake_version = fake_service['version'] + 1 fake_different_service = dict(fake_service) fake_different_service['version'] = fake_version obj = objects.Service() obj._from_db_object(mock.sentinel.context, obj, fake_different_service) self.assertEqual(fake_version, obj.version) def test_save_noop_with_only_version(self): o = objects.Service(context=mock.sentinel.context, id=fake_service['id']) o.obj_reset_changes(['id']) self.assertEqual(set(['version']), o.obj_what_changed()) with mock.patch('nova.db.service_update') as mock_update: o.save() self.assertFalse(mock_update.called) o.host = 'foo' with mock.patch('nova.db.service_update') as mock_update: mock_update.return_value = fake_service o.save() mock_update.assert_called_once_with( mock.sentinel.context, fake_service['id'], {'version': service.SERVICE_VERSION, 'host': 'foo'})
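
# --- Illustrative sketch (editor's addition; not part of the test module) ---
# The bookkeeping that TestServiceVersion.test_version enforces lives in
# nova/objects/service.py and looks roughly like this; the version numbers and
# RPC version strings below are hypothetical:
#
# SERVICE_VERSION = 2
# SERVICE_VERSION_HISTORY = (
#     # Version 0: pre-history
#     {'compute_rpc': '4.0'},
#     # Version 1: compute RPC bump
#     {'compute_rpc': '4.5'},
#     # Version 2: current compute RPC version
#     {'compute_rpc': '4.6'},
# )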
{ "content_hash": "8898b958c753c3b6949ab170b19924e0", "timestamp": "", "source": "github", "line_count": 371, "max_line_length": 79, "avg_line_length": 42.34231805929919, "alnum_prop": 0.592844866000382, "repo_name": "raildo/nova", "id": "9dacdb5358646f3cd9938c894483fc45df5b5687", "size": "16314", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nova/tests/unit/objects/test_service.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "16814792" }, { "name": "Shell", "bytes": "20716" }, { "name": "Smarty", "bytes": "351433" } ], "symlink_target": "" }
import datetime import mock from os_win import constants as os_win_const from oslo_config import cfg from oslo_serialization import jsonutils from oslo_utils import units from jacket.tests.compute.unit.virt.hyperv import test_base from jacket.compute.virt.hyperv import constants from jacket.compute.virt.hyperv import hostops CONF = cfg.CONF class HostOpsTestCase(test_base.HyperVBaseTestCase): """Unit tests for the Hyper-V HostOps class.""" FAKE_ARCHITECTURE = 0 FAKE_NAME = 'fake_name' FAKE_MANUFACTURER = 'FAKE_MANUFACTURER' FAKE_NUM_CPUS = 1 FAKE_INSTANCE_DIR = "C:/fake/dir" FAKE_LOCAL_IP = '10.11.12.13' FAKE_TICK_COUNT = 1000000 def setUp(self): super(HostOpsTestCase, self).setUp() self._hostops = hostops.HostOps() self._hostops._hostutils = mock.MagicMock() self._hostops._pathutils = mock.MagicMock() def test_get_cpu_info(self): mock_processors = mock.MagicMock() info = {'Architecture': self.FAKE_ARCHITECTURE, 'Name': self.FAKE_NAME, 'Manufacturer': self.FAKE_MANUFACTURER, 'NumberOfCores': self.FAKE_NUM_CPUS, 'NumberOfLogicalProcessors': self.FAKE_NUM_CPUS} def getitem(key): return info[key] mock_processors.__getitem__.side_effect = getitem self._hostops._hostutils.get_cpus_info.return_value = [mock_processors] response = self._hostops._get_cpu_info() self._hostops._hostutils.get_cpus_info.assert_called_once_with() expected = [mock.call(fkey) for fkey in os_win_const.PROCESSOR_FEATURE.keys()] self._hostops._hostutils.is_cpu_feature_present.has_calls(expected) expected_response = self._get_mock_cpu_info() self.assertEqual(expected_response, response) def _get_mock_cpu_info(self): return {'vendor': self.FAKE_MANUFACTURER, 'model': self.FAKE_NAME, 'arch': constants.WMI_WIN32_PROCESSOR_ARCHITECTURE[ self.FAKE_ARCHITECTURE], 'features': list(os_win_const.PROCESSOR_FEATURE.values()), 'topology': {'cores': self.FAKE_NUM_CPUS, 'threads': self.FAKE_NUM_CPUS, 'sockets': self.FAKE_NUM_CPUS}} def test_get_memory_info(self): self._hostops._hostutils.get_memory_info.return_value = (2 * units.Ki, 1 * units.Ki) response = self._hostops._get_memory_info() self._hostops._hostutils.get_memory_info.assert_called_once_with() self.assertEqual((2, 1, 1), response) def test_get_local_hdd_info_gb(self): self._hostops._pathutils.get_instances_dir.return_value = '' self._hostops._hostutils.get_volume_info.return_value = (2 * units.Gi, 1 * units.Gi) response = self._hostops._get_local_hdd_info_gb() self._hostops._pathutils.get_instances_dir.assert_called_once_with() self._hostops._hostutils.get_volume_info.assert_called_once_with('') self.assertEqual((2, 1, 1), response) def test_get_hypervisor_version(self): self._hostops._hostutils.get_windows_version.return_value = '6.3.9600' response_lower = self._hostops._get_hypervisor_version() self._hostops._hostutils.get_windows_version.return_value = '10.1.0' response_higher = self._hostops._get_hypervisor_version() self.assertEqual(6003, response_lower) self.assertEqual(10001, response_higher) @mock.patch.object(hostops.HostOps, '_get_cpu_info') @mock.patch.object(hostops.HostOps, '_get_memory_info') @mock.patch.object(hostops.HostOps, '_get_hypervisor_version') @mock.patch.object(hostops.HostOps, '_get_local_hdd_info_gb') @mock.patch('platform.node') def test_get_available_resource(self, mock_node, mock_get_local_hdd_info_gb, mock_get_hypervisor_version, mock_get_memory_info, mock_get_cpu_info): mock_get_local_hdd_info_gb.return_value = (mock.sentinel.LOCAL_GB, mock.sentinel.LOCAL_GB_FREE, mock.sentinel.LOCAL_GB_USED) mock_get_memory_info.return_value = 
(mock.sentinel.MEMORY_MB, mock.sentinel.MEMORY_MB_FREE, mock.sentinel.MEMORY_MB_USED) mock_cpu_info = self._get_mock_cpu_info() mock_get_cpu_info.return_value = mock_cpu_info mock_get_hypervisor_version.return_value = mock.sentinel.VERSION response = self._hostops.get_available_resource() mock_get_memory_info.assert_called_once_with() mock_get_cpu_info.assert_called_once_with() mock_get_hypervisor_version.assert_called_once_with() expected = {'supported_instances': [("i686", "hyperv", "hvm"), ("x86_64", "hyperv", "hvm")], 'hypervisor_hostname': mock_node(), 'cpu_info': jsonutils.dumps(mock_cpu_info), 'hypervisor_version': mock.sentinel.VERSION, 'memory_mb': mock.sentinel.MEMORY_MB, 'memory_mb_used': mock.sentinel.MEMORY_MB_USED, 'local_gb': mock.sentinel.LOCAL_GB, 'local_gb_used': mock.sentinel.LOCAL_GB_USED, 'vcpus': self.FAKE_NUM_CPUS, 'vcpus_used': 0, 'hypervisor_type': 'hyperv', 'numa_topology': None, } self.assertEqual(expected, response) def _test_host_power_action(self, action): self._hostops._hostutils.host_power_action = mock.Mock() self._hostops.host_power_action(action) self._hostops._hostutils.host_power_action.assert_called_with( action) def test_host_power_action_shutdown(self): self._test_host_power_action(constants.HOST_POWER_ACTION_SHUTDOWN) def test_host_power_action_reboot(self): self._test_host_power_action(constants.HOST_POWER_ACTION_REBOOT) def test_host_power_action_exception(self): self.assertRaises(NotImplementedError, self._hostops.host_power_action, constants.HOST_POWER_ACTION_STARTUP) def test_get_host_ip_addr(self): CONF.set_override('my_ip', None) self._hostops._hostutils.get_local_ips.return_value = [ self.FAKE_LOCAL_IP] response = self._hostops.get_host_ip_addr() self._hostops._hostutils.get_local_ips.assert_called_once_with() self.assertEqual(self.FAKE_LOCAL_IP, response) @mock.patch('time.strftime') def test_get_host_uptime(self, mock_time): self._hostops._hostutils.get_host_tick_count64.return_value = ( self.FAKE_TICK_COUNT) response = self._hostops.get_host_uptime() tdelta = datetime.timedelta(milliseconds=int(self.FAKE_TICK_COUNT)) expected = "%s up %s, 0 users, load average: 0, 0, 0" % ( str(mock_time()), str(tdelta)) self.assertEqual(expected, response)
{ "content_hash": "27199982d848f0de40785843aab5aa57", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 79, "avg_line_length": 44.285714285714285, "alnum_prop": 0.5896505376344086, "repo_name": "HybridF5/jacket", "id": "58b75d17a880f59c56a0543c2517840ece1314c1", "size": "8079", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "jacket/tests/compute/unit/virt/hyperv/test_hostops.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "26995056" }, { "name": "Shell", "bytes": "28464" }, { "name": "Smarty", "bytes": "291947" } ], "symlink_target": "" }
""" Testing sphinxext module """ import os from os.path import abspath, dirname, join as pjoin import sys from subprocess import check_call _my_path = abspath(dirname(__file__)) doc_path = pjoin(_my_path, 'doc') # path to custom style sys.path.insert(0, _my_path) from .. import bibref as bs from nose.tools import assert_true, assert_equal, assert_raises def test_custom_styles(): cs = bs.custom_styles({}) assert_equal(cs, {}) assert_raises(ImportError, bs.custom_styles, {'something': 'implausible.something'}) cs = bs.custom_styles({'test-style': 'style1'}) assert_true(hasattr(cs['test-style'], 'CitationManager')) # Test nested package.module sys.path.insert(0, pjoin(_my_path, '..')) cs = bs.custom_styles({'test-style': 'tests.style1'}) assert_true(hasattr(cs['test-style'], 'CitationManager')) def test_builds(): # just does dumb builds to check they work without error pwd = os.getcwd() try: os.chdir(doc_path) check_call('make clean', shell=True) check_call('make html', shell=True) check_call('make clean', shell=True) # might not work on windows or systems without latex support check_call('make latex', shell=True) finally: os.chdir(pwd)
{ "content_hash": "bbadbfa816d740f7a2a4b406a393dc2e", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 75, "avg_line_length": 29.977272727272727, "alnum_prop": 0.6345716451857468, "repo_name": "matthew-brett/bibstuff", "id": "3b9dcef107be7f77f95bc0b6c2780bee3af1175a", "size": "1319", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "bibstuff/sphinxext/tests/test_bibref.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "4116" }, { "name": "Makefile", "bytes": "4610" }, { "name": "Python", "bytes": "201506" }, { "name": "TeX", "bytes": "6710" } ], "symlink_target": "" }
import pytest def test_all_have_star_name(SCdata): # Test every entry has a "Star" name df, _ = SCdata assert not df.Star.isnull().values.any() def test_all_have_link(SCdata): # Test each entry has an associated author/paper. df, _ = SCdata null_links = df.link.isnull() # printing is for identifying missing links on failure only print("Stars with missing links:\n") for star in df.Star[null_links].values: print(star) assert not null_links.values.any() # xfail on purpose (to indicate there are missing links only) @pytest.mark.xfail def test_links_are_not_self_generated(SCdata): # Test if any have exoplanet.eu link from generate_missing_links. df, _ = SCdata generated_links = df.link.str.contains("http://exoplanet.eu/catalog/") print(generated_links) # printing is for identifying generated_links on failure only print("Stars with artificial links:\n") for star in df.Star[generated_links].values: print(star) assert not generated_links.values.any() def test_all_have_Author(SCdata): # Test each entry has an associated link to paper. df, _ = SCdata null_author = df.Author.isnull() # printing is for identifying missing author on failure only. print("Stars with missing Author:") for star in df.Star[null_author].values: print(star) assert not null_author.values.any()
{ "content_hash": "70b2092a8757e52396ed7257099e7617", "timestamp": "", "source": "github", "line_count": 44, "max_line_length": 74, "avg_line_length": 32.04545454545455, "alnum_prop": 0.6872340425531915, "repo_name": "DanielAndreasen/SWEETer-Cat", "id": "e019550048ba4464719a6a190304b8776345d1e5", "size": "1454", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sweetercat/tests/test_data.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "528" }, { "name": "HTML", "bytes": "21349" }, { "name": "Procfile", "bytes": "89" }, { "name": "Python", "bytes": "70153" } ], "symlink_target": "" }
from __future__ import unicode_literals import base64 import datetime import mimetypes import six from parse_rest.connection import API_ROOT, ParseBase from parse_rest.query import QueryManager from parse_rest.core import ParseError def complex_type(name=None): '''Decorator for registering complex types''' def wrapped(cls): ParseType.type_mapping[name or cls.__name__] = cls return cls return wrapped class ParseType(object): type_mapping = {} @staticmethod def convert_from_parse(parse_key, parse_data): if isinstance(parse_data, list): return [ParseType.convert_from_parse(parse_key, item) for item in parse_data] parse_type = None if isinstance(parse_data, dict): if '__type' in parse_data: parse_type = parse_data.pop('__type') elif parse_key == 'ACL': parse_type = 'ACL' # if its not a parse type -- simply return it. This means it wasn't a "special class" if not parse_type: return parse_data native = ParseType.type_mapping.get(parse_type) return native.from_native(**parse_data) if native else parse_data @staticmethod def convert_to_parse(python_object, as_pointer=False): is_object = isinstance(python_object, ParseResource) #User is derived from ParseResouce not Object, check against ParseResource if is_object and not as_pointer: return dict([(k, ParseType.convert_to_parse(v, as_pointer=True)) for k, v in python_object._editable_attrs.items() ]) python_type = ParseResource if is_object else type(python_object) # classes that need to be cast to a different type before serialization transformation_map = { datetime.datetime: Date, ParseResource: Pointer } if (hasattr(python_object, '__iter__') and not isinstance(python_object, (six.string_types[0], ParseType))): # It's an iterable? Repeat this whole process on each object if isinstance(python_object, dict): for key, value in python_object.items(): python_object[key]=ParseType.convert_to_parse(value, as_pointer=as_pointer) return python_object else: return [ParseType.convert_to_parse(o, as_pointer=as_pointer) for o in python_object] if python_type in transformation_map: klass = transformation_map.get(python_type) return klass(python_object)._to_native() if isinstance(python_object, ParseType): return python_object._to_native() return python_object @classmethod def from_native(cls, **kw): return cls(**kw) def _to_native(self): raise NotImplementedError("_to_native must be overridden") @complex_type('Pointer') class Pointer(ParseType): @classmethod def from_native(cls, **kw): # create object with only objectId and unloaded flag. it is automatically loaded when any other field is accessed klass = Object.factory(kw.get('className')) return klass(objectId=kw.get('objectId'), _is_loaded=False) def __init__(self, obj): self._object = obj def _to_native(self): return { '__type': 'Pointer', 'className': self._object.className, 'objectId': self._object.objectId } @complex_type('Object') class EmbeddedObject(ParseType): @classmethod def from_native(cls, **kw): klass = Object.factory(kw.pop('className')) return klass(**kw) @complex_type('Relation') class Relation(ParseType): @classmethod def from_native(cls, **kw): return cls(**kw) def with_parent(self, **kw): """The parent calls this if the Relation already exists.""" if 'parentObject' in kw: self.parentObject = kw['parentObject'] self.key = kw['key'] return self def __init__(self, **kw): """Called either via Relation(), or via from_native(). In both cases, the Relation object cannot perform queries until we know what classes are on both sides of the relation. 
        If it's called via from_native, then a later call to with_parent()
        provides parent information.

        If it's called as Relation(), the relatedClassName is discovered
        either on the first added object, or by querying the server to
        retrieve the schema.
        """
        # Name of the key on the parent object.
        self.key = None
        self.parentObject = None
        self.relatedClassName = None

        # Called via from_native()
        if 'className' in kw:
            self.relatedClassName = kw['className']

        # Called via Relation(...)
        if 'parentObject' in kw:
            self.parentObject = kw['parentObject']
            self.key = kw['key']

    def __repr__(self):
        className = objectId = None
        if self.parentObject is not None:
            className = self.parentObject.className
            objectId = self.parentObject.objectId
        text = u'<Relation where %s:%s for %s>' % \
            (className, objectId, self.relatedClassName)
        return text

    def _to_native(self):
        # Saving relations is a separate operation and thus should never need
        # to convert this field _to_native
        return None

    def add(self, objs):
        """Adds a Parse.Object or an array of Parse.Objects to the relation."""
        if type(objs) is not list:
            objs = [objs]
        if self.relatedClassName is None:
            # find the related class from the first object added
            self.relatedClassName = objs[0].className
        setattr(self.parentObject, self.key, self)
        objectsId = []
        for obj in objs:
            if not hasattr(obj, 'objectId') or obj.objectId is None:
                obj.save()
            objectsId.append(obj.objectId)
        self.parentObject.addRelation(self.key, self.relatedClassName, objectsId)

    def remove(self, objs):
        """Removes one Parse.Object, or an array of them, from this relation."""
        if type(objs) is not list:
            objs = [objs]
        objectsId = []
        for obj in objs:
            if hasattr(obj, 'objectId'):
                objectsId.append(obj.objectId)
        self.parentObject.removeRelation(self.key, self.relatedClassName, objectsId)

    def query(self):
        """Returns a Parse.Query limited to objects in this relation."""
        if self.relatedClassName is None:
            self._probe_for_relation_class()
        key = '%s__relatedTo' % (self.key,)
        kw = {key: self.parentObject}
        relatedClass = Object.factory(self.relatedClassName)
        q = relatedClass.Query.all().filter(**kw)
        return q

    def _probe_for_relation_class(self):
        """Retrieve the schema from the server to find the related class."""
        schema = self.parentObject.__class__.schema()
        fields = schema['fields']
        relatedColumn = fields[self.key]
        columnType = relatedColumn['type']
        if columnType == 'Relation':
            self.relatedClassName = relatedColumn['targetClass']
        else:
            raise ParseError(
                'Column type is %s, expected Relation' % (columnType,))


@complex_type()
class Date(ParseType):
    FORMAT = '%Y-%m-%dT%H:%M:%S.%f%Z'

    @classmethod
    def from_native(cls, **kw):
        return cls._from_str(kw.get('iso', ''))

    @staticmethod
    def _from_str(date_str):
        """Turn an ISO 8601 string into a datetime object."""
        return datetime.datetime.strptime(date_str[:-1] + 'UTC', Date.FORMAT)

    def __init__(self, date):
        """Can be initialized either with a string or a datetime."""
        if isinstance(date, datetime.datetime):
            self._date = date
        elif isinstance(date, six.string_types):
            self._date = Date._from_str(date)

    def _to_native(self):
        return {
            # Parse expects an ISO 8601 date with 3-digit milliseconds, not 6
            '__type': 'Date',
            'iso': '{0}Z'.format(self._date.strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3])
        }


@complex_type('Bytes')
class Binary(ParseType):

    @classmethod
    def from_native(cls, **kw):
        return cls(kw.get('base64', ''))

    def __init__(self, encoded_string):
        self._encoded = encoded_string
        self._decoded = str(base64.b64decode(self._encoded))

    def _to_native(self):
        return {'__type': 'Bytes', 'base64': self._encoded}


@complex_type()
class GeoPoint(ParseType):

    @classmethod
    def from_native(cls, **kw):
        return cls(kw.get('latitude'), kw.get('longitude'))

    def __init__(self, latitude, longitude):
        self.latitude = latitude
        self.longitude = longitude

    def _to_native(self):
        return {
            '__type': 'GeoPoint',
            'latitude': self.latitude,
            'longitude': self.longitude
        }


@complex_type()
class File(ParseType, ParseBase):
    ENDPOINT_ROOT = '/'.join([API_ROOT, 'files'])

    @classmethod
    def from_native(cls, **kw):
        return cls(**kw)

    def __init__(self, name, content=None, mimetype=None, url=None):
        self._name = name
        self._file_url = url
        self._api_url = '/'.join([API_ROOT, 'files', name])
        self._content = content
        # guess_type() returns a (type, encoding) tuple; only the type part
        # is a usable Content-type value
        self._mimetype = mimetype or mimetypes.guess_type(name)[0]
        if not content and not url:
            # read as bytes so binary uploads are not mangled
            with open(name, 'rb') as f:
                content = f.read()
            self._content = content

    def __repr__(self):
        return '<File:%s>' % (getattr(self, '_name', None))

    def _to_native(self):
        return {
            '__type': 'File',
            'name': self._name,
            'url': self._file_url
        }

    def save(self, batch=False):
        if self.url is not None:
            raise ParseError("Files can't be overwritten")
        uri = '/'.join([self.__class__.ENDPOINT_ROOT, self.name])
        headers = {'Content-type': self.mimetype}
        response = self.__class__.POST(uri, extra_headers=headers, batch=batch,
                                       _body=self._content)
        self._file_url = response['url']
        self._name = response['name']
        self._api_url = '/'.join([API_ROOT, 'files', self._name])
        if batch:
            return response, lambda response_dict: None

    def delete(self, batch=False):
        uri = "/".join([self.__class__.ENDPOINT_ROOT, self.name])
        response = self.__class__.DELETE(uri, batch=batch)
        if batch:
            return response, lambda response_dict: None

    mimetype = property(lambda self: self._mimetype)
    url = property(lambda self: self._file_url)
    name = property(lambda self: self._name)
    _absolute_url = property(lambda self: self._api_url)


@complex_type()
class ACL(ParseType):

    @classmethod
    def from_native(cls, **kw):
        return cls(kw)

    def __init__(self, acl=None):
        self._acl = acl or {}

    def _to_native(self):
        return self._acl

    def __repr__(self):
        return '%s(%s)' % (type(self).__name__, repr(self._acl))

    def set_default(self, read=False, write=False):
        self._set_permission("*", read, write)

    def set_role(self, role, read=False, write=False):
        if isinstance(role, ParseResource):
            self._set_permission("role:%s" % role.name, read, write)
        else:
            self._set_permission("role:%s" % role, read, write)

    def set_user(self, user, read=False, write=False):
        if isinstance(user, ParseResource):
            self._set_permission(user.objectId, read, write)
        else:
            self._set_permission(user, read, write)

    def set_all(self, permissions):
        self._acl.clear()
        for k, v in permissions.items():
            self._set_permission(k, **v)

    def _set_permission(self, name, read=False, write=False):
        permissions = {}
        if read is True:
            permissions["read"] = True
        if write is True:
            permissions["write"] = True
        if len(permissions):
            self._acl[name] = permissions
        else:
            self._acl.pop(name, None)


class Function(ParseBase):
    ENDPOINT_ROOT = '/'.join((API_ROOT, 'functions'))

    def __init__(self, name):
        self.name = name

    def __call__(self, **kwargs):
        return self.POST('/' + self.name, **kwargs)


class Job(ParseBase):
    ENDPOINT_ROOT = '/'.join((API_ROOT, 'jobs'))

    def __init__(self, name):
        self.name = name

    def __call__(self, **kwargs):
        return self.POST('/' + self.name, **kwargs)


class ParseResource(ParseBase):

    PROTECTED_ATTRIBUTES = ['objectId', 'createdAt', 'updatedAt']

    @property
    def _editable_attrs(self):
        protected_attrs = self.__class__.PROTECTED_ATTRIBUTES
        allowed = lambda a: a not in protected_attrs and not a.startswith('_')
        return dict([(k, v) for k, v in self.__dict__.items() if allowed(k)])

    def __init__(self, **kw):
        self.objectId = None
        self._init_attrs(kw)

    def __getattr__(self, attr):
        # if object is not loaded and attribute is missing, try to load it
        if not self.__dict__.get('_is_loaded', True):
            del self._is_loaded
            self._init_attrs(self.GET(self._absolute_url))
        return object.__getattribute__(self, attr)  # preserve default behavior if the attribute does not exist

    def _init_attrs(self, args):
        for key, value in six.iteritems(args):
            # https://github.com/milesrichardson/ParsePy/issues/155
            try:
                setattr(self, key, ParseType.convert_from_parse(key, value))
            except AttributeError:
                continue

    def _to_native(self):
        return ParseType.convert_to_parse(self)

    def _get_updated_datetime(self):
        return self.__dict__.get('_updated_at') and self._updated_at._date

    def _set_updated_datetime(self, value):
        self._updated_at = Date(value)

    def _get_created_datetime(self):
        return self.__dict__.get('_created_at') and self._created_at._date

    def _set_created_datetime(self, value):
        self._created_at = Date(value)

    def save(self, batch=False):
        if self.objectId:
            return self._update(batch=batch)
        else:
            return self._create(batch=batch)

    def _create(self, batch=False):
        uri = self.__class__.ENDPOINT_ROOT
        response = self.__class__.POST(uri, batch=batch, **self._to_native())

        def call_back(response_dict):
            self.createdAt = self.updatedAt = response_dict['createdAt']
            self.objectId = response_dict['objectId']

        if batch:
            return response, call_back
        else:
            call_back(response)

    def _update(self, batch=False):
        response = self.__class__.PUT(self._absolute_url, batch=batch,
                                      **self._to_native())

        def call_back(response_dict):
            self.updatedAt = response_dict['updatedAt']

        if batch:
            return response, call_back
        else:
            call_back(response)

    def delete(self, batch=False):
        response = self.__class__.DELETE(self._absolute_url, batch=batch)
        if batch:
            return response, lambda response_dict: None

    @property
    def className(self):
        return self.__class__.__name__

    @property
    def _absolute_url(self):
        return '%s/%s' % (self.__class__.ENDPOINT_ROOT, self.objectId)

    createdAt = property(_get_created_datetime, _set_created_datetime)
    updatedAt = property(_get_updated_datetime, _set_updated_datetime)

    def __repr__(self):
        return '<%s:%s>' % (self.__class__.__name__, self.objectId)


class ObjectMetaclass(type):
    def __new__(mcs, name, bases, dct):
        cls = super(ObjectMetaclass, mcs).__new__(mcs, name, bases, dct)
        # The attr check must be here because of the specific
        # six.with_metaclass implementation, where the metaclass is also used
        # for the internal NewBase, which doesn't have a set_endpoint_root
        # method.
        if hasattr(cls, 'set_endpoint_root'):
            cls.set_endpoint_root()
            cls.Query = QueryManager(cls)
        return cls


class Object(six.with_metaclass(ObjectMetaclass, ParseResource)):
    ENDPOINT_ROOT = '/'.join([API_ROOT, 'classes'])

    @classmethod
    def factory(cls, class_name):
        """Find the proper Object subclass matching class_name.

        System types like _User are mapped to types without the underscore
        (parse_rest.user.User). If the user doesn't declare a matching type,
        a class is created on the fly.
        """
        class_name = str(class_name.lstrip('_'))
        types = ParseResource.__subclasses__()
        while types:
            t = types.pop()
            if t.__name__ == class_name:
                return t
            types.extend(t.__subclasses__())
        else:
            return type(class_name, (Object,), {})

    @classmethod
    def set_endpoint_root(cls):
        root = '/'.join([API_ROOT, 'classes', cls.__name__])
        if cls.ENDPOINT_ROOT != root:
            cls.ENDPOINT_ROOT = root
        return cls.ENDPOINT_ROOT

    @classmethod
    def schema(cls):
        """Retrieves the class' schema."""
        root = '/'.join([API_ROOT, 'schemas', cls.__name__])
        schema = cls.GET(root)
        return schema

    @classmethod
    def schema_delete_field(cls, key):
        """Deletes a field."""
        root = '/'.join([API_ROOT, 'schemas', cls.__name__])
        payload = {
            'className': cls.__name__,
            'fields': {
                key: {
                    '__op': 'Delete'
                }
            }
        }
        cls.PUT(root, **payload)

    @property
    def _absolute_url(self):
        if not self.objectId:
            return None
        return '/'.join([self.__class__.ENDPOINT_ROOT, self.objectId])

    @property
    def as_pointer(self):
        return Pointer(self)

    def increment(self, key, amount=1):
        """
        Increment one value in the object. Note that this happens immediately:
        it does not wait for save() to be called.
        """
        payload = {
            key: {
                '__op': 'Increment',
                'amount': amount
            }
        }
        self.__class__.PUT(self._absolute_url, **payload)
        self.__dict__[key] += amount

    def remove(self, key):
        """
        Clear a column value in the object. Note that this happens immediately:
        it does not wait for save() to be called.
        """
        payload = {
            key: {
                '__op': 'Delete'
            }
        }
        self.__class__.PUT(self._absolute_url, **payload)
        del self.__dict__[key]

    def removeRelation(self, key, className, objectsId):
        self.manageRelation('RemoveRelation', key, className, objectsId)

    def addRelation(self, key, className, objectsId):
        self.manageRelation('AddRelation', key, className, objectsId)

    def manageRelation(self, action, key, className, objectsId):
        objects = [{
            "__type": "Pointer",
            "className": className,
            "objectId": objectId
        } for objectId in objectsId]

        payload = {
            key: {
                "__op": action,
                "objects": objects
            }
        }
        self.__class__.PUT(self._absolute_url, **payload)
        # self.__dict__[key] = ''

    def relation(self, key):
        if not hasattr(self, key):
            return Relation(parentObject=self, key=key)
        try:
            return getattr(self, key).with_parent(parentObject=self, key=key)
        except AttributeError:
            raise ParseError("Column '%s' is not a Relation." % (key,))
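# Usage sketch for the Relation machinery above. The class, column, and
# object names here are hypothetical, and a registered connection
# (parse_rest.connection) is assumed; shown commented out since this module
# is imported as a library:
#
#     class Game(Object):
#         pass
#
#     game = Game.Query.get(objectId='xKoiu3Bb')   # hypothetical objectId
#     scores = game.relation('scores')             # Relation for that column
#     scores.add([score_a, score_b])               # unsaved objects are saved first
#     related = list(scores.query())               # Query limited to this relation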
{ "content_hash": "37b53ba7406ea880cf06d02d6e452b4e", "timestamp": "", "source": "github", "line_count": 625, "max_line_length": 135, "avg_line_length": 32.2432, "alnum_prop": 0.5721020246129417, "repo_name": "alacroix/ParsePy", "id": "46de98aa31355f38ce02e90ce782363828d050e5", "size": "20816", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "parse_rest/datatypes.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1150" }, { "name": "Python", "bytes": "74832" } ], "symlink_target": "" }
""" :mod:`nova` -- Cloud IaaS Platform =================================== .. automodule:: nova :platform: Unix :synopsis: Infrastructure-as-a-Service Cloud platform. .. moduleauthor:: Jesse Andrews <jesse@ansolabs.com> .. moduleauthor:: Devin Carlen <devin.carlen@gmail.com> .. moduleauthor:: Vishvananda Ishaya <vishvananda@gmail.com> .. moduleauthor:: Joshua McKenty <joshua@cognition.ca> .. moduleauthor:: Manish Singh <yosh@gimp.org> .. moduleauthor:: Andy Smith <andy@anarkystic.com> """ import gettext gettext.install("nova", unicode=1)
{ "content_hash": "5af0f6578ce1451484a077522e47c8c6", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 60, "avg_line_length": 29.157894736842106, "alnum_prop": 0.6823104693140795, "repo_name": "KarimAllah/nova", "id": "f2e9684d3033f7136430a69f44b7bda768ff6b00", "size": "1331", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "nova/__init__.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "JavaScript", "bytes": "7412" }, { "name": "Python", "bytes": "5419134" }, { "name": "Shell", "bytes": "24506" } ], "symlink_target": "" }
"""Provides access to the extensions of a program.""" import importlib import tensorflow as tf import vesper.birdvox.detectors as birdvox_detectors import vesper.util.yaml_utils as yaml_utils # Note that even though the `ExtensionManager` class is typically used as a # singleton, we make it a class rather than a module to facilitate testing. # # Note also that rather than loading extensions eagerly in the `__init__` # method, an `ExtensionManager` instead loads them lazily in its # `get_extensions` method. Loading extensions in the `__init__` method # would not work since we want to allow extension modules to use the # extension manager on import, but it is not available until after its # `__init__` method executes. # # The `get_extensions` method also loads extensions only of the requested # type. This avoids loading extensions of types that are never used. # TODO: Would it be possible to load extension modules and their # dependencies only when they are actually used? This might be # accomplished by separating extension metadata from code modules and # only loading the code modules when they are actually needed. This # would help avoid unnecessary and undesirable imports in some cases. # TODO: Use a hierarchical name space for plugins, extension points, and # extensions? _TF_VERSION = int(tf.__version__.split('.')[0]) _TF1_CLASSIFIERS = ''' - vesper.mpg_ranch.nfc_coarse_classifier_2_1.classifier.Classifier - vesper.mpg_ranch.nfc_coarse_classifier_3_0.classifier.Classifier - vesper.mpg_ranch.nfc_coarse_classifier_4_0.classifier.Classifier ''' _TF1_DETECTORS = ''' # MPG Ranch Thrush Detector 0.0 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector40 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector50 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector60 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector70 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector80 - vesper.mpg_ranch.nfc_detector_0_0.detector.ThrushDetector90 # MPG Ranch Tseep Detector 0.0 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector40 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector50 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector60 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector70 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector80 - vesper.mpg_ranch.nfc_detector_0_0.detector.TseepDetector90 # MPG Ranch Thrush Detector 0.1 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector40 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector50 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector60 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector70 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector80 - vesper.mpg_ranch.nfc_detector_0_1.detector.ThrushDetector90 # MPG Ranch Tseep Detector 0.1 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector40 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector50 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector60 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector70 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector80 - vesper.mpg_ranch.nfc_detector_0_1.detector.TseepDetector90 # MPG Ranch Thrush Detector 1.0 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector - 
vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector20 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector30 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector40 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector50 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector60 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector70 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector80 - vesper.mpg_ranch.nfc_detector_1_0.detector.ThrushDetector90 # MPG Ranch Tseep Detector 1.0 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector20 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector30 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector40 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector50 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector60 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector70 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector80 - vesper.mpg_ranch.nfc_detector_1_0.detector.TseepDetector90 ''' _TF2_CLASSIFIERS = ''' # MPG Ranch - vesper.mpg_ranch.nfc_bounding_interval_annotator_1_0.annotator.Annotator - vesper.mpg_ranch.nfc_coarse_classifier_3_1.classifier.Classifier - vesper.mpg_ranch.nfc_coarse_classifier_4_1.classifier.Classifier # PSW - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier10 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier20 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier30 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier40 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier50 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier60 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier70 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier80 - vesper.psw.nogo_coarse_classifier_0_0.classifier.Classifier90 ''' _TF2_DETECTORS = ''' # MPG Ranch Thrush Detector 1.1 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector20 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector30 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector40 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector50 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector60 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector70 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector70_25 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector70_12 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector80 - vesper.mpg_ranch.nfc_detector_1_1.detector.ThrushDetector90 # MPG Ranch Tseep Detector 1.1 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector20 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector30 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector40 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector50 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector60 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector60_25 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector60_12 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector70 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector80 - vesper.mpg_ranch.nfc_detector_1_1.detector.TseepDetector90 ''' if _TF_VERSION == 1: _TF_CLASSIFIERS = _TF1_CLASSIFIERS _TF_DETECTORS = _TF1_DETECTORS else: _TF_CLASSIFIERS = _TF2_CLASSIFIERS _TF_DETECTORS = _TF2_DETECTORS _EXTENSION_SPEC = f''' Classifier: 
{_TF_CLASSIFIERS} - vesper.mpg_ranch.nfc_detector_low_score_classifier_1_0.classifier.Classifier - vesper.mpg_ranch.outside_classifier.OutsideClassifier - vesper.old_bird.lighthouse_outside_classifier.LighthouseOutsideClassifier Command: - vesper.command.add_recording_audio_files_command.AddRecordingAudioFilesCommand - vesper.command.classify_command.ClassifyCommand - vesper.command.create_clip_audio_files_command.CreateClipAudioFilesCommand - vesper.command.create_random_clips_command.CreateRandomClipsCommand - vesper.command.delete_clip_audio_files_command.DeleteClipAudioFilesCommand - vesper.command.delete_clips_command.DeleteClipsCommand - vesper.command.delete_recordings_command.DeleteRecordingsCommand - vesper.command.detect_command.DetectCommand - vesper.command.execute_deferred_actions_command.ExecuteDeferredActionsCommand - vesper.command.export_clip_counts_by_classification_to_csv_file_command.ExportClipCountsByClassificationToCsvFileCommand - vesper.command.export_clip_counts_by_tag_to_csv_file_command.ExportClipCountsByTagToCsvFileCommand - vesper.command.export_command.ExportCommand - vesper.command.import_command.ImportCommand - vesper.command.refresh_recording_audio_file_paths_command.RefreshRecordingAudioFilePathsCommand - vesper.command.tag_clips_command.TagClipsCommand - vesper.command.test_command.TestCommand - vesper.command.transfer_clip_classifications_command.TransferClipClassificationsCommand - vesper.command.untag_clips_command.UntagClipsCommand - vesper.old_bird.add_old_bird_clip_start_indices_command.AddOldBirdClipStartIndicesCommand Detector: {_TF_DETECTORS} # Old Bird redux detectors 1.0 - vesper.old_bird.old_bird_detector_redux_1_0.ThrushDetector - vesper.old_bird.old_bird_detector_redux_1_0.TseepDetector # Old Bird redux detectors 1.1 - vesper.old_bird.old_bird_detector_redux_1_1.ThrushDetector - vesper.old_bird.old_bird_detector_redux_1_1.TseepDetector # Pacific Southwest (PSW) Research Station detectors - vesper.psw.nogo_detector_0_0.detector.Detector Exporter: - vesper.command.clip_audio_file_exporter.ClipAudioFileExporter - vesper.command.clip_hdf5_file_exporter.ClipHdf5FileExporter - vesper.command.clip_metadata_csv_file_exporter.ClipMetadataCsvFileExporter Importer: - vesper.command.metadata_importer.MetadataImporter - vesper.command.recording_importer.RecordingImporter - vesper.old_bird.clip_importer.ClipImporter Preset: - vesper.command.clip_audio_file_export_settings_preset.ClipAudioFileExportSettingsPreset - vesper.command.clip_hdf5_file_export_settings_preset.ClipHdf5FileExportSettingsPreset - vesper.command.clip_table_format_preset.ClipTableFormatPreset - vesper.command.detection_schedule_preset.DetectionSchedulePreset - vesper.command.station_name_aliases_preset.StationNameAliasesPreset - vesper.django.app.clip_album_commands_preset.ClipAlbumCommandsPreset - vesper.django.app.clip_album_settings_preset.ClipAlbumSettingsPreset Recording File Parser: - vesper.mpg_ranch.recording_file_parser.RecordingFileParser Clip File Name Formatter: - vesper.command.clip_audio_file_exporter.SimpleClipFileNameFormatter ''' class ExtensionManager: def __init__(self, extension_spec=_EXTENSION_SPEC): self._extension_spec = yaml_utils.load(extension_spec) # Create extension dictionary that includes an item for each # extension point name, but don't attempt to load any extensions. # Loading doesn't happen until extensions are actually requested. 
extension_point_names = sorted(self._extension_spec.keys()) self._extensions = dict((name, None) for name in extension_point_names) def get_extensions(self, extension_point_name): try: extensions = self._extensions[extension_point_name] except KeyError: raise ValueError( f'Unrecognized extension point name "{extension_point_name}".') if extensions is None: # extensions for this extension point not yet loaded extensions = self._load_extensions(extension_point_name) self._extensions[extension_point_name] = extensions # self._show_loaded_extensions(extension_point_name) return dict((e.extension_name, e) for e in extensions) def _load_extensions(self, extension_point_name): module_class_names = self._extension_spec[extension_point_name] extensions = [_load_extension(name) for name in module_class_names] if extension_point_name == 'Detector': # Load BirdVoxDetect detector extensions. These classes # are created dynamically according to the detectors # listed in the archive database, and thus cannot be # specified via YAML. Note that when Vesper's new plugin # infrastructure is complete, the extension manager will # need no special knowledge to get BirdVox detectors, but # rather will discover them at load time just like all # other plugins. extensions += birdvox_detectors.get_detector_classes() return extensions def _show_loaded_extensions(self, extension_point_name): print(f'Loaded "{extension_point_name}" extensions:') extensions = self._extensions[extension_point_name] names = sorted(e.extension_name for e in extensions) for name in names: print(f' {name}') def _load_extension(module_class_name): module_name, class_name = module_class_name.rsplit('.', 1) module = importlib.import_module(module_name) return getattr(module, class_name)
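# A sketch of how the lazy loading above behaves in practice. Extension point
# names come from the YAML spec (e.g. 'Detector' or 'Classifier'); the dict
# key shown is hypothetical, since keys are each class's `extension_name`
# attribute:
#
#     manager = ExtensionManager()
#     detectors = manager.get_extensions('Detector')   # first call imports modules
#     detector_cls = detectors.get('Old Bird Tseep Detector Redux 1.1')
#
# A second call to get_extensions('Detector') reuses the cached classes
# rather than importing the extension modules again.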
{ "content_hash": "1a970737652b919c0e2e1f86881c5320", "timestamp": "", "source": "github", "line_count": 305, "max_line_length": 126, "avg_line_length": 44.337704918032784, "alnum_prop": 0.7355616357317163, "repo_name": "HaroldMills/Vesper", "id": "39be4a71569f4a8377d504901ca427c29699377e", "size": "13523", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vesper/util/extension_manager.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "92" }, { "name": "CSS", "bytes": "9101" }, { "name": "Dockerfile", "bytes": "1678" }, { "name": "HTML", "bytes": "70614" }, { "name": "JavaScript", "bytes": "410277" }, { "name": "Python", "bytes": "2697554" }, { "name": "Shell", "bytes": "2772" }, { "name": "TypeScript", "bytes": "30001" } ], "symlink_target": "" }
"""Collection for Entrez gene records given an entrez gene id most data fetched from ncbi entrez gene""" import requests import time from snovault import ( collection, load_schema, calculated_property ) from snovault.validators import ( validate_item_content_post, validate_item_content_patch, validate_item_content_put, ) from pyramid.view import view_config from .base import ( Item, lab_award_attribution_embed_list ) ################################################ # Outside methods for online data fetch ################################################ def get_gene_info_from_response_text(response): ''' use NCBI url rather than eutils so we can get tabular report get the stuff between the pre tag ''' if '<pre>' not in response or '</pre>' not in response: # misformatted or empty response return {} if 'Error occurred' in response: # id not found or other bad response return{} _, info, _ = response.split('pre>') info = info.replace('</', '') lines = info.split('\n') if len(lines) != 2: # pre section should only contain 2 lines - header and values return {} fields = lines[0].split('\t') values = lines[1].split('\t') return dict(zip(fields, values)) def fetch_gene_info_from_ncbi(geneid): url = "https://www.ncbi.nlm.nih.gov/gene/{id}".format(id=geneid) NCBI = url + "?report=tabular&format=text" for count in range(5): resp = requests.get(NCBI) if resp.status_code == 200: break if resp.status_code == 429: # pragma: no cover time.sleep(3) continue if count == 4: # pragma: no cover return {} text = resp.text gene_info = get_gene_info_from_response_text(text) syns = gene_info.get('Aliases') if syns: gene_info['Aliases'] = [s.strip() for s in syns.split(',')] if gene_info: gene_info['url'] = url return gene_info def map_ncbi2schema(geneinfo): ''' Mapping of NCBI field names to corresponding 4dn schema properties ''' field_map = {'tax_id': 'organism', 'Status': 'ncbi_entrez_status', 'Symbol': 'official_symbol', 'Aliases': 'synonyms', 'description': 'fullname', 'url': 'url'} return {field_map[k]: v for k, v in geneinfo.items() if k in field_map and v} @collection( name='genes', unique_key='gene:geneid', lookup_key='preferred_symbol', properties={ 'title': 'Genes', 'description': 'Entrez gene items', }) class Gene(Item): """Gene class.""" item_type = 'gene' name_key = 'geneid' schema = load_schema('encoded:schemas/gene.json') embedded_list = lab_award_attribution_embed_list + [ # Organism embed 'organism.scientific_name', 'organism.name' ] def _update(self, properties, sheets=None): # fetch info from ncbi gene based on id provided geneid = properties.get('geneid') gene_info = {} try: gene_info = fetch_gene_info_from_ncbi(geneid) except Exception: pass if gene_info: gene_info = map_ncbi2schema(gene_info) if 'organism' in gene_info: try: # make sure the organism is in the db gene_info['organism'] = str(self.registry['collections']['Organism'].get(gene_info['organism']).uuid) except Exception: # otherwise remove the organism del gene_info['organism'] properties.update(gene_info) if properties.get('preferred_symbol', None) is None: symbol = properties.get('official_symbol') if symbol: properties['preferred_symbol'] = symbol super(Gene, self)._update(properties, sheets) return @calculated_property(schema={ "title": "Display Title", "description": "A calculated title for every object in 4DN", "type": "string" }) def display_title(self, request, geneid, preferred_symbol=None): if preferred_symbol: return preferred_symbol return 'GENE ID:{}'.format(geneid) class Collection(Item.Collection): pass
{ "content_hash": "d73ff7b6d11af8f5a4025e506735d9d8", "timestamp": "", "source": "github", "line_count": 135, "max_line_length": 121, "avg_line_length": 31.91111111111111, "alnum_prop": 0.5807799442896936, "repo_name": "hms-dbmi/fourfront", "id": "4534d6d4a3754e77a99ad84742dac1d917e5c162", "size": "4308", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/encoded/types/gene.py", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "741" }, { "name": "CSS", "bytes": "198339" }, { "name": "Cucumber", "bytes": "16918" }, { "name": "HTML", "bytes": "371973" }, { "name": "JavaScript", "bytes": "1403972" }, { "name": "Makefile", "bytes": "110" }, { "name": "PLpgSQL", "bytes": "12067" }, { "name": "Python", "bytes": "751772" }, { "name": "Ruby", "bytes": "1066" }, { "name": "Shell", "bytes": "2248" } ], "symlink_target": "" }
from __future__ import absolute_import

from django.conf import settings
from django.conf.urls import url

from wagtail.wagtailimages.views.serve import ServeView

from .api import router
from .views import image

urlpatterns = [
    url(r'^api/v1/', router.urls),
    # raw string avoids invalid escape sequences in the regex (\d)
    url(r'^images/(?P<pk>\d+)/(?P<specs>.+)$', image, name='image'),
    url(r'^images/([^/]*)/(\d*)/([^/]*)/[^/]*$', ServeView.as_view(), name='wagtailimages_serve'),
]

if settings.DEBUG:
    from django.views.generic import TemplateView

    # Add views for testing 404 and 500 templates
    urlpatterns += [
        url(r'^404/$', TemplateView.as_view(template_name='404.html')),
        url(r'^500/$', TemplateView.as_view(template_name='500.html')),
    ]
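# For reference, with the patterns above a request like /images/42/width-300
# routes to the `image` view with pk='42' and specs='width-300'; the same URL
# can be built with reverse('image', kwargs={'pk': 42, 'specs': 'width-300'})
# (example values only).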
{ "content_hash": "fe7b236c274307bdad9719fda90b04f3", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 98, "avg_line_length": 31.52173913043478, "alnum_prop": 0.6537931034482759, "repo_name": "apihackers/wapps", "id": "54d04219acfd062a8fdd702d83fb5c85a241d38f", "size": "725", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "wapps/urls.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "935" }, { "name": "HTML", "bytes": "6935" }, { "name": "JavaScript", "bytes": "5969" }, { "name": "Python", "bytes": "183616" }, { "name": "Shell", "bytes": "818" }, { "name": "Vue", "bytes": "1969" } ], "symlink_target": "" }
"""Polish-specific form helpers.""" import datetime import re from django.forms import ValidationError from django.forms.fields import RegexField, Select from django.utils.translation import gettext_lazy as _ from .pl_administrativeunits import ADMINISTRATIVE_UNIT_CHOICES from .pl_voivodeships import VOIVODESHIP_CHOICES class PLProvinceSelect(Select): """A select widget with list of Polish administrative provinces as choices.""" def __init__(self, attrs=None): super().__init__(attrs, choices=VOIVODESHIP_CHOICES) class PLCountySelect(Select): """A select widget with list of Polish administrative units as choices.""" def __init__(self, attrs=None): super().__init__(attrs, choices=ADMINISTRATIVE_UNIT_CHOICES) class PLPESELField(RegexField): """ A form field that validates as Polish Identification Number (PESEL). Checks the following rules: * the length consist of 11 digits * has a valid checksum * contains a valid birth date The algorithm is documented at http://en.wikipedia.org/wiki/PESEL. .. versionchanged:: 1.4 """ default_error_messages = { 'invalid': _('National Identification Number consists of 11 digits.'), 'checksum': _('Wrong checksum for the National Identification Number.'), 'birthdate': _('The National Identification Number contains an invalid birth date.'), } def __init__(self, **kwargs): super().__init__(r'^\d{11}$', **kwargs) def clean(self, value): value = super().clean(value) if value in self.empty_values: return value if not self.has_valid_checksum(value): raise ValidationError(self.error_messages['checksum'], code='checksum') if not self.has_valid_birth_date(value): raise ValidationError(self.error_messages['birthdate'], code='birthdate') return '%s' % value def has_valid_checksum(self, number): """Calculates a checksum with the provided algorithm.""" multiple_table = (1, 3, 7, 9, 1, 3, 7, 9, 1, 3, 1) result = 0 for i, digit in enumerate(number): result += int(digit) * multiple_table[i] return result % 10 == 0 def has_valid_birth_date(self, number): """ Checks whether the birth date encoded in PESEL is valid. """ y = int(number[:2]) m = int(number[2:4]) d = int(number[4:6]) md2century = {80: 1800, 0: 1900, 20: 2000, 40: 2100, 60: 2200} for md, cent in md2century.items(): if 1 <= m - md <= 12: y += cent m -= md break try: self.birth_date = datetime.date(y, m, d) return True except ValueError: return False class PLNationalIDCardNumberField(RegexField): """ A form field that validates as Polish National ID Card Number. Checks the following rules: * the length consist of 3 letter and 6 digits * has a valid checksum The algorithm is documented at http://en.wikipedia.org/wiki/Polish_identity_card. 
""" default_error_messages = { 'invalid': _('National ID Card Number consists of 3 letters and 6 digits.'), 'checksum': _('Wrong checksum for the National ID Card Number.'), } def __init__(self, **kwargs): super().__init__(r'^[A-Za-z]{3}\d{6}$', **kwargs) def clean(self, value): value = super().clean(value) if value in self.empty_values: return value value = value.upper() if not self.has_valid_checksum(value): raise ValidationError(self.error_messages['checksum'], code='checksum') return '%s' % value def has_valid_checksum(self, number): """Calculates a checksum with the provided algorithm.""" letter_dict = {'A': 10, 'B': 11, 'C': 12, 'D': 13, 'E': 14, 'F': 15, 'G': 16, 'H': 17, 'I': 18, 'J': 19, 'K': 20, 'L': 21, 'M': 22, 'N': 23, 'O': 24, 'P': 25, 'Q': 26, 'R': 27, 'S': 28, 'T': 29, 'U': 30, 'V': 31, 'W': 32, 'X': 33, 'Y': 34, 'Z': 35} # convert letters to integer values int_table = [(not c.isdigit()) and letter_dict[c] or int(c) for c in number] multiple_table = (7, 3, 1, -1, 7, 3, 1, 7, 3) result = 0 for i, digit in enumerate(int_table): result += int(digit) * multiple_table[i] return result % 10 == 0 class PLNIPField(RegexField): """ A form field that validates as Polish Tax Number (NIP). Valid forms are: XXX-YYY-YY-YY, XXX-YY-YY-YYY or XXXYYYYYYY. Checksum algorithm based on documentation at http://wipos.p.lodz.pl/zylla/ut/nip-rego.html """ default_error_messages = { 'invalid': _('Enter a tax number field (NIP) in the format XXX-XXX-XX-XX, XXX-XX-XX-XXX or XXXXXXXXXX.'), 'checksum': _('Wrong checksum for the Tax Number (NIP).'), } def __init__(self, **kwargs): super().__init__( r'^\d{3}-\d{3}-\d{2}-\d{2}$|^\d{3}-\d{2}-\d{2}-\d{3}$|^\d{10}$', **kwargs ) def clean(self, value): value = super().clean(value) if value in self.empty_values: return value value = re.sub("[-]", "", value) if not self.has_valid_checksum(value): raise ValidationError(self.error_messages['checksum'], code='checksum') return '%s' % value def has_valid_checksum(self, number): """Calculates a checksum with the provided algorithm.""" multiple_table = (6, 5, 7, 2, 3, 4, 5, 6, 7) result = 0 for i, digit in enumerate(number[:-1]): result += int(digit) * multiple_table[i] result %= 11 return result == int(number[-1]) class PLREGONField(RegexField): """ A form field that validates its input is a REGON number. Valid regon number consists of 9 or 14 digits. See http://www.stat.gov.pl/bip/regon_ENG_HTML.htm for more information. """ default_error_messages = { 'invalid': _('National Business Register Number (REGON) consists of 9 or 14 digits.'), 'checksum': _('Wrong checksum for the National Business Register Number (REGON).'), } def __init__(self, **kwargs): super().__init__(r'^\d{9,14}$', **kwargs) def clean(self, value): value = super().clean(value) if value in self.empty_values: return value if not self.has_valid_checksum(value): raise ValidationError(self.error_messages['checksum'], code='checksum') return '%s' % value def has_valid_checksum(self, number): """Calculates a checksum with the provided algorithm.""" weights = ( (8, 9, 2, 3, 4, 5, 6, 7, -1), (2, 4, 8, 5, 0, 9, 7, 3, 6, 1, 2, 4, 8, -1), (8, 9, 2, 3, 4, 5, 6, 7, -1, 0, 0, 0, 0, 0), ) weights = [table for table in weights if len(table) == len(number)] for table in weights: checksum = sum([int(n) * w for n, w in zip(number, table)]) mod_result = checksum % 11 if mod_result == 10 and number[-1] != '0': return False if mod_result % 10: return False return bool(weights) class PLPostalCodeField(RegexField): """ A form field that validates as Polish postal code. 
    Valid code is XX-XXX, where X is a digit.
    """
    default_error_messages = {
        'invalid': _('Enter a postal code in the format XX-XXX.'),
    }

    def __init__(self, **kwargs):
        super().__init__(r'^\d{2}-\d{3}$', **kwargs)
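# Worked example of the PESEL checksum implemented above, using the PESEL
# 44051401359 that appears in the public documentation of the algorithm:
#
#     digits:  4  4  0  5  1  4  0  1  3  5  9
#     weights: 1  3  7  9  1  3  7  9  1  3  1
#     sum = 4 + 12 + 0 + 45 + 1 + 12 + 0 + 9 + 3 + 15 + 9 = 110
#
# 110 % 10 == 0, so has_valid_checksum() accepts it, and the encoded birth
# date (year 44, month 05 -> 1944-05-14) passes has_valid_birth_date().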
{ "content_hash": "d03d40d5f6b860a202817d44f066b58f", "timestamp": "", "source": "github", "line_count": 241, "max_line_length": 113, "avg_line_length": 32.531120331950206, "alnum_prop": 0.5644132653061225, "repo_name": "rsalmaso/django-localflavor", "id": "8dd86919573cf8dbb76361096794074d7ec4be19", "size": "7840", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "localflavor/pl/forms.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "906251" } ], "symlink_target": "" }
from runner.koan import * class AboutLists(Koan): def test_creating_lists(self): empty_list = list() self.assertEqual(list, type(empty_list)) self.assertEqual(0, len(empty_list)) def test_list_literals(self): nums = list() self.assertEqual([], nums) nums[0:] = [1] self.assertEqual([1], nums) nums[1:] = [2] self.assertEqual([1, 2], nums) nums.append(333) self.assertEqual([1, 2, 333], nums) def test_accessing_list_elements(self): noms = ['peanut', 'butter', 'and', 'jelly'] self.assertEqual('peanut', noms[0]) self.assertEqual('jelly', noms[3]) self.assertEqual('jelly', noms[-1]) self.assertEqual('butter', noms[-3]) def test_slicing_lists(self): noms = ['peanut', 'butter', 'and', 'jelly'] self.assertEqual(['peanut'], noms[0:1]) self.assertEqual(['peanut', 'butter'], noms[0:2]) self.assertEqual([], noms[2:2]) self.assertEqual(['and', 'jelly'], noms[2:20]) self.assertEqual([], noms[4:0]) self.assertEqual([], noms[4:100]) self.assertEqual([], noms[5:0]) def test_slicing_to_the_edge(self): noms = ['peanut', 'butter', 'and', 'jelly'] self.assertEqual(['and', 'jelly'], noms[2:]) self.assertEqual(['peanut', 'butter'], noms[:2]) def test_lists_and_ranges(self): self.assertEqual(list, type(range(5))) self.assertEqual([0,1,2,3,4], range(5)) self.assertEqual([5,6,7,8], range(5, 9)) def test_ranges_with_steps(self): self.assertEqual([0,2,4,6], range(0, 8, 2)) self.assertEqual([1,4,7], range(1, 8, 3)) self.assertEqual([5,1,-3], range(5, -7, -4)) self.assertEqual([5,1,-3,-7], range(5, -8, -4)) def test_insertions(self): knight = ['you', 'shall', 'pass'] knight.insert(2, 'not') self.assertEqual(['you', 'shall', 'not', 'pass'], knight) knight.insert(0, 'Arthur') self.assertEqual(['Arthur', 'you', 'shall', 'not', 'pass'], knight) def test_popping_lists(self): stack = [10, 20, 30, 40] stack.append('last') self.assertEqual([10, 20, 30, 40, 'last'], stack) popped_value = stack.pop() self.assertEqual('last', popped_value) self.assertEqual([10, 20, 30, 40], stack) popped_value = stack.pop(1) self.assertEqual(20, popped_value) self.assertEqual([10, 30, 40], stack) # Notice that there is a "pop" but no "push" in python? # Part of the Python philosophy is that there ideally should be one and # only one way of doing anything. A 'push' is the same as an 'append'. # To learn more about this try typing "import this" from the python # console... ;) def test_use_deques_for_making_queues(self): from collections import deque queue = deque([1, 2]) queue.append('last') self.assertEqual([1, 2, 'last'], list(queue)) popped_value = queue.popleft() self.assertEqual(1, popped_value) self.assertEqual([2, 'last'], list(queue))
{ "content_hash": "c4f0dcb2ffcaa0bb237845c95930c651", "timestamp": "", "source": "github", "line_count": 99, "max_line_length": 79, "avg_line_length": 33.45454545454545, "alnum_prop": 0.5410628019323671, "repo_name": "jrumball/PyKoans", "id": "37abdabb3bb2bc112123346cabb3401b5549ad5e", "size": "3405", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "python 2/koans/about_lists.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "328315" }, { "name": "Shell", "bytes": "80" } ], "symlink_target": "" }
import logging
import zlib
import io

from .exceptions import DecodeError
from .packages.six import string_types as basestring, binary_type
from .util import is_fp_closed


log = logging.getLogger(__name__)


class DeflateDecoder(object):

    def __init__(self):
        self._first_try = True
        self._data = binary_type()
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        return getattr(self._obj, name)

    def decompress(self, data):
        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            return self._obj.decompress(data)
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None


def _get_decoder(mode):
    if mode == 'gzip':
        return zlib.decompressobj(16 + zlib.MAX_WBITS)

    return DeflateDecoder()


class HTTPResponse(io.IOBase):
    """
    HTTP Response container.

    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed.

    Extra parameters for behaviour not present in httplib.HTTPResponse:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, the body will be decoded based on the 'content-encoding'
        header (e.g. 'gzip' and 'deflate'). If False, decoding is skipped
        and the raw data is used instead.

    :param original_response:
        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.
    """

    CONTENT_DECODERS = ['gzip', 'deflate']

    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):
        self.headers = headers or {}
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None
        self._body = body if body and isinstance(body, basestring) else None
        self._fp = None
        self._original_response = original_response

        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def get_redirect_location(self):
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in [301, 302, 303, 307]:
            return self.headers.get('location')

        return False

    def release_conn(self):
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None

    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)

    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object.
            This is useful if you want the ``.data`` property to continue
            working after having ``.read()`` the file object. (Overridden if
            ``amt`` is set.)
        """
        # Note: content-encoding value should be case-insensitive, per RFC 2616
        # Section 3.5
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False

        try:
            if amt is None:
                # cStringIO doesn't like amt=None
                data = self._fp.read()
                flush_decoder = True
            else:
                cache_content = False
                data = self._fp.read(amt)
                if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                    # Close the connection when no data is returned
                    #
                    # This is redundant to what httplib/http.client _should_
                    # already do. However, versions of python released before
                    # December 15, 2012 (http://bugs.python.org/issue16298) do
                    # not properly close the connection in all cases. There is
                    # no harm in redundantly calling close.
                    self._fp.close()
                    flush_decoder = True

            try:
                if decode_content and self._decoder:
                    data = self._decoder.decompress(data)
            except (IOError, zlib.error):
                raise DecodeError("Received response with content-encoding: %s, but "
                                  "failed to decode it." % content_encoding)

            if flush_decoder and self._decoder:
                buf = self._decoder.decompress(binary_type())
                data += buf + self._decoder.flush()

            if cache_content:
                self._body = data

            return data

        finally:
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            that much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the
            empty string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        while not is_fp_closed(self._fp):
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data

    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """

        # Normalize headers between different versions of Python
        headers = {}
        for k, v in r.getheaders():
            # Python 3: Header keys are returned capitalised
            k = k.lower()

            has_value = headers.get(k)
            if has_value:  # Python 3: Repeating header keys are unmerged.
                v = ', '.join([has_value, v])

            headers[k] = v

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)

        return ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)

    # Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        return self.headers

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    # Overrides from io.IOBase
    def close(self):
        if not self.closed:
            self._fp.close()

    @property
    def closed(self):
        if self._fp is None:
            return True
        elif hasattr(self._fp, 'closed'):
            return self._fp.closed
        elif hasattr(self._fp, 'isclosed'):  # Python 2
            return self._fp.isclosed()
        else:
            return True

    def fileno(self):
        if self._fp is None:
            raise IOError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise IOError("The file-like object this HTTPResponse is wrapped "
                          "around has no file descriptor")

    def flush(self):
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()

    def readable(self):
        return True
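# A consumption sketch for the streaming interface above (hypothetical URL;
# assumes a PoolManager and preload_content=False so the body stays attached
# to the underlying file object):
#
#     http = PoolManager()
#     r = http.request('GET', 'http://example.com/big.bin', preload_content=False)
#     with open('big.bin', 'wb') as out:
#         for chunk in r.stream(2 ** 16):
#             out.write(chunk)
#     r.release_conn()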
{ "content_hash": "1864b9bfcda46b7674a30ce03ab2b0fb", "timestamp": "", "source": "github", "line_count": 291, "max_line_length": 89, "avg_line_length": 33.80068728522337, "alnum_prop": 0.574013826758845, "repo_name": "slashk/prowl.alfredworkflow", "id": "74a5156c4382187aa39680c714ab963491f20ca6", "size": "10063", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "requests/packages/urllib3/response.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "664349" } ], "symlink_target": "" }
import ctypes from sheep.shepherd import Shepherd class Cons(Shepherd): def do_work(self): print "trying to be nasty" ctypes.string_at(1) if __name__ == "__main__": Cons.run()
{ "content_hash": "665b840d68d8c3a2e57b96eefe0f1f06", "timestamp": "", "source": "github", "line_count": 12, "max_line_length": 35, "avg_line_length": 17, "alnum_prop": 0.6127450980392157, "repo_name": "heynemann/sheep", "id": "bc442f4607559b92a8cf7f9240e9d05c159d2c00", "size": "251", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/cons.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "12496" } ], "symlink_target": "" }
"""An RFC-4217 asynchronous FTPS server supporting both SSL and TLS. Requires PyOpenSSL module (http://pypi.python.org/pypi/pyOpenSSL). """ import os from pyftpdlib import ftpserver from pyftpdlib.contrib.handlers import TLS_FTPHandler CERTFILE = os.path.abspath(os.path.join(os.path.dirname(__file__), "keycert.pem")) def main(): authorizer = ftpserver.DummyAuthorizer() authorizer.add_user('user', '12345', '.', perm='elradfmw') authorizer.add_anonymous('.') ftp_handler = TLS_FTPHandler ftp_handler.certfile = CERTFILE ftp_handler.authorizer = authorizer # requires SSL for both control and data channel #ftp_handler.tls_control_required = True #ftp_handler.tls_data_required = True ftpd = ftpserver.FTPServer(('', 8021), ftp_handler) ftpd.serve_forever() if __name__ == '__main__': main()
{ "content_hash": "b543b95f7bb4d180eb1808084a874b04", "timestamp": "", "source": "github", "line_count": 28, "max_line_length": 68, "avg_line_length": 31.678571428571427, "alnum_prop": 0.6696730552423901, "repo_name": "Teamxrtc/webrtc-streaming-node", "id": "71ed8205833367608d0914157c780114407b6458", "size": "2311", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "third_party/webrtc/src/chromium/src/third_party/pyftpdlib/src/demo/tls_ftpd.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "44" }, { "name": "C++", "bytes": "221840" }, { "name": "HTML", "bytes": "2383" }, { "name": "JavaScript", "bytes": "37396" }, { "name": "Python", "bytes": "2860" }, { "name": "Shell", "bytes": "104" } ], "symlink_target": "" }
import pyaf.Bench.TS_datasets as tsds import tests.artificial.process_artificial_dataset as art art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "None", sigma = 0.0, exog_count = 0, ar_order = 0);
{ "content_hash": "33a54b670e8d8d85dfec60b3f6a224eb", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 158, "avg_line_length": 36.857142857142854, "alnum_prop": 0.6976744186046512, "repo_name": "antoinecarme/pyaf", "id": "48801f0d2032420a8adfd7266a7868cee5303896", "size": "258", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/artificial/transf_None/trend_Lag1Trend/cycle_30/ar_/test_artificial_32_None_Lag1Trend_30__0.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Makefile", "bytes": "6773299" }, { "name": "Procfile", "bytes": "24" }, { "name": "Python", "bytes": "54209093" }, { "name": "R", "bytes": "807" }, { "name": "Shell", "bytes": "3619" } ], "symlink_target": "" }
class _Namespace: def __init__(self): self._scope = 0 self._space = {self._scope: {}} def add_name(self, name, value): self._space[self._scope][name] = value def del_name(self, name): del self._space[self._scope][name] def add_scope(self): self._scope += 1 self._space[self._scope] = {} def del_scope(self): self._scope -= 1 del self._space[self._scope] def exists(self, name): return name in self._space[self._scope] def get(self, name): if self.exists(name): return self._space[self._scope][name] return None @property def scope(self): return self._scope class Context: def __init__(self): self.namespace = _Namespace() self.typespace = _Namespace() self.funcspace = _Namespace()
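# A short sketch of the scoping rules above: lookups consult only the
# current scope, and deleting a scope discards its names.
#
#     ns = _Namespace()
#     ns.add_name('x', 1)
#     ns.add_scope()
#     ns.add_name('y', 2)
#     ns.get('y')    # -> 2
#     ns.get('x')    # -> None (outer scopes are not searched)
#     ns.del_scope()
#     ns.get('x')    # -> 1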
{ "content_hash": "cf647f59c7cc29a430a0faadf9ed0e4e", "timestamp": "", "source": "github", "line_count": 39, "max_line_length": 49, "avg_line_length": 22.17948717948718, "alnum_prop": 0.5479768786127167, "repo_name": "adrian-lang/adrian.cgen", "id": "231a7cf09b073f97be186fe85fef176ce441f851", "size": "865", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "adrian/cgen/_context.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "46182" } ], "symlink_target": "" }
""" Functions to generate the HTML output """ from jinja2 import Template def load_template(template_file_path): """ Load a Jinja2 template and return it Args: template_file_path (str): path to the Jinja2 template file Returns: Template: contains jinja2 template """ return Template(open(template_file_path).read()) def render_template(template, context): """ Generate an HTML test report. Args: template (Template): Jinja2 Template object containing the template to render context (dict): the context to pass to the template Returns: str: the contents of the rendered template """ return template.render(context)
{ "content_hash": "c120ce8a18290e818111f9425933138b", "timestamp": "", "source": "github", "line_count": 31, "max_line_length": 85, "avg_line_length": 23.032258064516128, "alnum_prop": 0.6666666666666666, "repo_name": "mgrijalva/nose2-html-report", "id": "67bbc674af0185f60b08f580689ae283a61aa3c5", "size": "714", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "nose2_html_report/render.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "7896" }, { "name": "Python", "bytes": "10314" } ], "symlink_target": "" }
from django.test import TestCase, RequestFactory from django.test import Client from django.core import mail from django.utils import timezone from django.conf import settings from unittest.mock import patch from freezegun import freeze_time from studygroups.models import StudyGroup from studygroups.models import Facilitator from studygroups.models import Profile from studygroups.models import Course from studygroups.models import generate_all_meetings from studygroups.models import generate_all_meeting_dates from studygroups.models import generate_meetings_from_dates from studygroups.models import Team, TeamMembership from studygroups.views import LearningCircleListView from custom_registration.models import create_user from django.contrib.auth.models import User import datetime import json class TestLearningCircleApi(TestCase): fixtures = ['test_teams.json', 'test_courses.json', 'test_studygroups.json'] def setUp(self): with patch('custom_registration.signals.send_email_confirm_email'): user = create_user('faci@example.net', 'Bobjanechris', 'Trailer', 'password', False) user.save() self.facilitator = user mailchimp_patcher = patch('studygroups.models.profile.update_mailchimp_subscription') self.mock_maichimp = mailchimp_patcher.start() self.addCleanup(mailchimp_patcher.stop) def test_create_learning_circle(self): c = Client() c.login(username='faci@example.net', password='password') data = { "name": "Test learning circle", "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "1", "online": "false", "language": "en", "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "facilitator_concerns": "blah blah", } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.facilitator_set.all().count(), 1) self.assertEqual(lc.facilitator_set.first().user_id, lc.created_by_id) self.assertEqual(lc.course.id, 3) self.assertEqual(lc.name, "Test learning circle") self.assertEqual(lc.description, 'Lets learn something') self.assertEqual(lc.course_description, 'A real great course') self.assertEqual(lc.start_date, datetime.date(2018,2,12)) self.assertEqual(lc.meeting_time, datetime.time(17,1)) self.assertEqual(lc.meeting_set.all().count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in {} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('community@localhost', mail.outbox[0].cc) def test_create_learning_circle_with_facilitator_set(self): cofacilitator = create_user('cofaci@example.net', 'ba', 'ta', 'password', False) c = Client() c.login(username='faci@example.net', password='password') data = { "name": "Test learning circle", 
"course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "1", "online": "false", "language": "en", "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "facilitator_concerns": "blah blah", "facilitators": [cofacilitator.pk], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) with patch('studygroups.views.api.send_cofacilitator_email.delay') as send_cofacilitator_email: resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": { "facilitators": ["Facilitator not part of a team"], } }) team = Team.objects.create(name='awesome team') TeamMembership.objects.create(team=team, user=self.facilitator, role=TeamMembership.ORGANIZER) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": { "facilitators": ["Facilitators not part of the same team"], } }) TeamMembership.objects.create(team=team, user=cofacilitator, role=TeamMembership.MEMBER) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.facilitator_set.all().count(), 2) self.assertIn(cofacilitator.id, lc.facilitator_set.all().values_list('user_id', flat=True)) self.assertEqual(len(mail.outbox), 2) def test_create_learning_circle_without_name_or_course_description(self): c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "1", "online": "false", "language": "en", "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "facilitator_concerns": "blah blah", } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.name, lc.course.title) self.assertEqual(lc.course_description, lc.course.caption) @freeze_time('2018-01-20') def test_create_learning_circle_and_publish(self): c = Client() c.login(username='faci@example.net', 
password='password') self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() data = { "name": "Test learning circle", "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "1", "online": "false", "language": "en", "facilitator_concerns": "blah blah", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) self.assertEqual(len(mail.outbox), 0) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.facilitator_set.count(), 1) self.assertEqual(lc.course.id, 3) self.assertEqual(lc.draft, False) self.assertEqual(lc.name, "Test learning circle") self.assertEqual(lc.description, 'Lets learn something') self.assertEqual(lc.course_description, 'A real great course') self.assertEqual(lc.start_date, datetime.date(2018,2,12)) self.assertEqual(lc.meeting_time, datetime.time(17,1)) self.assertEqual(lc.meeting_set.all().count(), 2) self.assertEqual(lc.reminder_set.count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in {} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('community@localhost', mail.outbox[0].cc) # TODO test that correct facilitators are mentioned in reminders @freeze_time('2018-01-20') def test_create_learning_circle_and_publish_user_unconfirmed(self): c = Client() c.login(username='faci@example.net', password='password') data = { "name": "Storytelling with Sharon", "course": 3, "course_description": "A real great course", "description": "Lets learn something", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json().get('status'), 'created') self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(StudyGroup.objects.last().draft, True) self.assertEqual(len(mail.outbox), 1) def test_create_learning_circle_welcome(self): c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something",
"course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "1", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) # Test without concern only p2pu should be CC-ed resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.course.id, 3) self.assertEqual(lc.description, 'Lets learn something') self.assertEqual(lc.start_date, datetime.date(2018,2,12)) self.assertEqual(lc.meeting_time, datetime.time(17,1)) self.assertEqual(lc.meeting_set.all().count(), 2) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in {} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('thepeople@p2pu.org', mail.outbox[0].cc) self.assertEqual(len(mail.outbox[0].cc), 1) # Test with concern - welcome committee should be cc-ed mail.outbox = [] data['facilitator_concerns'] = 'How should I advertise' resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in {} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('thepeople@p2pu.org', mail.outbox[0].cc) self.assertIn('community@localhost', mail.outbox[0].cc) self.assertIn(data['facilitator_concerns'], mail.outbox[0].body) self.assertEqual(len(mail.outbox[0].cc), 2) # Test as part of team with concern - organizer and welcome should be CC-ed team = Team.objects.create(name='awesome team') team.save() organizer = create_user('org@niz.er', 'organ', 'izer', 'password', False) TeamMembership.objects.create(team=team, user=organizer, role=TeamMembership.ORGANIZER) TeamMembership.objects.create(team=team, user=self.facilitator, role=TeamMembership.MEMBER) mail.outbox = [] resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in {} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('thepeople@p2pu.org', mail.outbox[0].cc) self.assertIn('community@localhost', mail.outbox[0].cc) self.assertIn('org@niz.er', mail.outbox[0].cc) self.assertEqual(len(mail.outbox[0].cc), 3) # Test as part of team - team organizer should be cc-ed mail.outbox = [] del data['facilitator_concerns'] resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(len(mail.outbox), 1) self.assertEqual(mail.outbox[0].subject, 'Your “{}” learning circle in
{} has been created!'.format(lc.name, lc.city)) self.assertIn('faci@example.net', mail.outbox[0].to) self.assertIn('thepeople@p2pu.org', mail.outbox[0].cc) self.assertIn('org@niz.er', mail.outbox[0].cc) self.assertEqual(len(mail.outbox[0].cc), 2) def test_update_learning_circle(self): self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) # Update learning circle lc = StudyGroup.objects.all().last() self.assertFalse(lc.draft) url = '/api/learning-circle/{}/'.format(lc.pk) data['course'] = 1 data["description"] = "Lets learn something else" data["name"] = "A new LC name" data["facilitators"] = [f.user_id for f in lc.facilitator_set.all()] # date shouldn't matter, but lets make it after the lc started with freeze_time('2019-03-01'): resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()['status'], 'updated') lc = StudyGroup.objects.all().last() self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.course.id, 1) self.assertEqual(lc.description, "Lets learn something else") # test that reminders were regenerated self.assertIn('A new LC name', lc.reminder_set.first().email_subject) def test_update_learning_circle_date(self): c = Client() c.login(username='faci@example.net', password='password') self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-12-15", "meeting_time": "17:01" }, { "meeting_date": "2018-12-22", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": 
"{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.meeting_set.active().count(), 2) data["facilitators"] = [f.user_id for f in lc.facilitator_set.all()] # update more than 2 days before start with freeze_time("2018-12-12"): data['meetings'] = [ { "meeting_date": "2018-12-20", "meeting_time": "17:01" }, { "meeting_date": "2018-12-27", "meeting_time": "17:01" }, ] url = '/api/learning-circle/{}/'.format(lc.pk) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.json(), { "status": "updated", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) lc = StudyGroup.objects.all().last() self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.start_date, datetime.date(2018, 12, 20)) self.assertEqual(lc.meeting_set.active().count(), 2) def test_update_draft_learning_circle_date(self): c = Client() c.login(username='faci@example.net', password='password') self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": True, "meetings": [ { "meeting_date": "2018-12-15", "meeting_time": "17:01" }, { "meeting_date": "2018-12-22", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) with freeze_time('2018-12-01'): resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.meeting_set.active().count(), 2) data["facilitators"] = [f.user_id for f in lc.facilitator_set.all()] # update less than 2 days before with freeze_time("2018-12-14"): data["meetings"] = [ { "meeting_date": "2018-12-20", "meeting_time": "17:01" }, { "meeting_date": "2018-12-27", "meeting_time": "17:01" }, ] url = '/api/learning-circle/{}/'.format(lc.pk) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "updated", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) lc = StudyGroup.objects.all().last() self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.start_date, datetime.date(2018, 12, 20)) self.assertEqual(lc.meeting_set.active().count(), 2) # update more than 2 days before with freeze_time("2018-12-12"): data["meetings"] = [ { "meeting_date": "2018-12-19", "meeting_time": "17:01" }, { "meeting_date": "2018-12-26", "meeting_time": "17:01" }, ] url = '/api/learning-circle/{}/'.format(lc.pk) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.json(), { "status": "updated", "studygroup_url": 
"{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) lc = StudyGroup.objects.all().last() self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.start_date, datetime.date(2018, 12, 19)) self.assertEqual(lc.meeting_set.active().count(), 2) def test_update_learning_circle_facilitators(self): cofacilitator = create_user('cofaci@example.net', 'badumorum', 'ta', 'password', False) self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) # Update learning circle lc = StudyGroup.objects.all().last() self.assertFalse(lc.draft) url = '/api/learning-circle/{}/'.format(lc.pk) data["facilitators"] = [self.facilitator.pk, cofacilitator.pk] with patch('studygroups.views.api.send_cofacilitator_email.delay') as send_cofacilitator_email: resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": { "facilitators": ["Facilitator not part of a team"], } }) team = Team.objects.create(name='Team Awesome') lc.team = team lc.save() TeamMembership.objects.create(team=team, user=self.facilitator) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": { "facilitators": ["Facilitators not part of the same team"], } }) TeamMembership.objects.create(team=team, user=cofacilitator) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()['status'], 'updated') self.assertTrue(send_cofacilitator_email.called) self.assertIn(self.facilitator.first_name, lc.reminder_set.first().email_body) self.assertIn(cofacilitator.first_name, lc.reminder_set.first().email_body) c = Client() c.login(username='cofaci@example.net', password='password') resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()['status'], 'updated') c = Client() c.login(username='faci@example.net', password='password') data["facilitators"] = [] resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": { "facilitators": ["Cannot 
remove all faclitators from a learning circle"], } }) with patch('studygroups.views.api.send_cofacilitator_removed_email.delay') as send_cofacilitator_removed_email: data["facilitators"] = [cofacilitator.id] resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()['status'], 'updated') self.assertTrue(send_cofacilitator_removed_email.called) self.assertNotIn(self.facilitator.first_name, lc.reminder_set.first().email_body) self.assertIn(cofacilitator.first_name, lc.reminder_set.first().email_body) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 403) c = Client() c.login(username='cofaci@example.net', password='password') resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()['status'], 'updated') @freeze_time('2018-01-20') def test_publish_learning_circle(self): self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "75 Harrington", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.meeting_set.all().count(), 2) data['draft'] = False data['facilitators'] = [lc.created_by_id] # Update learning circle url = '/api/learning-circle/{}/'.format(lc.pk) resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json()['status'], 'updated') self.assertEqual(StudyGroup.objects.all().count(), 5) self.assertEqual(lc.meeting_set.all().count(), 2) def test_get_learning_circles(self): c = Client() resp = c.get('/api/learningcircles/') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 4) def test_get_learning_circles_unicode(self): self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "الصحة النفسية للطفل", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, 
"timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) with freeze_time('2018-01-20'): resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) lc = StudyGroup.objects.all().last() self.assertEqual(resp.json(), { "status": "created", "studygroup_url": "{}://{}/en/studygroup/{}/".format(settings.PROTOCOL, settings.DOMAIN, lc.pk) }) self.assertEqual(StudyGroup.objects.all().count(), 5) c = Client() resp = c.get('/api/learningcircles/') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 5) def test_get_learning_circles_invalid_venue_name(self): self.facilitator.profile.email_confirmed_at = timezone.now() self.facilitator.profile.save() c = Client() c.login(username='faci@example.net', password='password') data = { "course": 3, "description": "Lets learn something", "course_description": "A real great course", "venue_name": "/@@", "venue_details": "top floor", "venue_address": "75 Harrington", "city": "Cape Town", "country": "South Africa", "country_en": "South Africa", "region": "Western Cape", "latitude": 3.1, "longitude": "1.3", "place_id": "4", "online": "false", "language": "en", "meeting_time": "17:01", "duration": 50, "timezone": "UTC", "image": "/media/image.png", "draft": False, "meetings": [ { "meeting_date": "2018-02-12", "meeting_time": "17:01" }, { "meeting_date": "2018-02-19", "meeting_time": "17:01" }, ], } url = '/api/learning-circle/' self.assertEqual(StudyGroup.objects.all().count(), 4) with freeze_time('2018-01-20'): resp = c.post(url, data=json.dumps(data), content_type='application/json') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json(), { "status": "error", "errors": {"venue_name": [ 'Venue name should include at least one alpha-numeric character.' 
]}, }) self.assertEqual(StudyGroup.objects.all().count(), 4) def test_get_learning_circles_drafts(self): c = Client() sg = StudyGroup.objects.get(pk=1) sg.draft = True sg.save() # exclude drafts by default resp = c.get('/api/learningcircles/') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 3) # include drafts resp = c.get('/api/learningcircles/?draft=true') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 4) self.assertEqual(resp.json()["items"][0]['id'], 1) self.assertEqual(resp.json()["items"][0]['draft'], True) def test_get_learning_circles_by_weekday(self): sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2018,1,26) sg.save() sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2018,1,27) sg.save() c = Client() # Friday and Saturday resp = c.get('/api/learningcircles/?weekdays=4,5') self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 2) def test_get_learning_circles_full_text_search(self): c = Client() # partial word match resp = c.get('/api/learningcircles/', {'q': 'aca'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 2) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") self.assertEqual(data["items"][1]["course"]["provider"], "Khan Academy") resp = c.get('/api/learningcircles/', {'q': 'acad'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 2) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") self.assertEqual(data["items"][1]["course"]["provider"], "Khan Academy") resp = c.get('/api/learningcircles/', {'q': 'acade'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 2) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") self.assertEqual(data["items"][1]["course"]["provider"], "Khan Academy") resp = c.get('/api/learningcircles/', {'q': 'academ'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 2) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") self.assertEqual(data["items"][1]["course"]["provider"], "Khan Academy") resp = c.get('/api/learningcircles/', {'q': 'writ'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 1) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") resp = c.get('/api/learningcircles/', {'q': 'writing'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 1) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") # full word match resp = c.get('/api/learningcircles/', {'q': 'academy'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 1) self.assertEqual(data["items"][0]["course"]["provider"], "Khan Academy") resp = c.get('/api/learningcircles/', {'q': 'academic'}) self.assertEqual(resp.status_code, 200) data = resp.json() self.assertEqual(data["count"], 1) self.assertEqual(data["items"][0]["course"]["title"], "Academic Writing") @freeze_time("2019-05-31") def test_get_learning_circles_by_scope(self): sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,6,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.save() sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.save() 
sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,5,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.save() sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=4) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.draft = True sg.save() c = Client() # active scope resp = c.get('/api/learningcircles/?scope=active&draft=true') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(data["count"], 3) self.assertEqual(data["items"][0]["id"], 1) self.assertEqual(data["items"][1]["id"], 2) self.assertEqual(data["items"][2]["id"], 4) self.assertEqual(data["items"][2]["draft"], True) # upcoming scope resp = c.get('/api/learningcircles/?scope=upcoming&draft=true') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(data["count"], 2) self.assertEqual(data["items"][0]["id"], 1) self.assertEqual(data["items"][1]["id"], 4) # current scope resp = c.get('/api/learningcircles/?scope=current') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 1) self.assertEqual(data["items"][0]["id"], 2) # completed scope resp = c.get('/api/learningcircles/?scope=completed') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 1) self.assertEqual(data["items"][0]["id"], 3) @freeze_time("2019-05-31") def test_get_learning_circles_next_meeting(self): sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.save() sg.refresh_from_db() generate_all_meetings(sg) c = Client() resp = c.get('/api/learningcircles/?scope=active') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 1) self.assertEqual(data["items"][0]["next_meeting_date"], "2019-06-06") @freeze_time("2015-03-21") def test_get_learning_circles_last_meeting(self): sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2015,2,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.save() sg.refresh_from_db() generate_all_meetings(sg) c = Client() resp = c.get('/api/learningcircles/?scope=completed') data = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(resp.json()["count"], 1) self.assertEqual(data["items"][0]["last_meeting_date"], "2015-02-15") @freeze_time("2019-05-31") def test_get_learning_circles_signup_open(self): # open for signup sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,6,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # closed for signup because it's set as closed sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = False sg.save() sg.refresh_from_db() generate_all_meetings(sg) # closed for signup because the last meeting is in the past sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,5,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # doesn't show up in results because it's in draft sg = StudyGroup.objects.get(pk=4) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.draft = True sg.save() c = Client() # open for signup resp = 
c.get('/api/learningcircles/?signup=open') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) self.assertEqual(result["signup_open_count"], 1) self.assertEqual(result["signup_closed_count"], 2) self.assertEqual(result['items'][0]['signup_open'], True) # closed for signup resp = c.get('/api/learningcircles/?signup=closed') self.assertEqual(resp.status_code, 200) result = resp.json() self.assertEqual(result["count"], 2) self.assertEqual(result["signup_open_count"], 1) self.assertEqual(result["signup_closed_count"], 2) self.assertEqual(result['items'][0]['signup_open'], False) self.assertEqual(result['items'][1]['signup_open'], False) @freeze_time("2019-05-31") def test_get_learning_circles_order(self): sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,5,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2019,5,8) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,5,15) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) sg = StudyGroup.objects.get(pk=4) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() c = Client() # ordered by first meeting date (asc) resp = c.get('/api/learningcircles/?order=first_meeting_date') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result['items'][0]['id'], 1) self.assertEqual(result['items'][1]['id'], 2) # ordered by last meeting date (desc) resp = c.get('/api/learningcircles/?order=last_meeting_date') self.assertEqual(resp.status_code, 200) result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result['items'][0]['id'], 4) self.assertEqual(result['items'][1]['id'], 3) def test_get_learning_circles_by_topics(self): c = Client() # single topic resp = c.get('/api/learningcircles/?topics=math') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) self.assertEqual(result["items"][0]['course']['id'], 1) # multiple topics resp = c.get('/api/learningcircles/?topics=math,writing') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 2) self.assertEqual(result["items"][0]['course']['id'], 2) self.assertEqual(result["items"][1]['course']['id'], 1) def test_get_learning_circles_by_location(self): sg = StudyGroup.objects.get(pk=1) # boston coordinates sg.latitude = '42.360200' sg.longitude = '-71.058300' sg.save() c = Client() resp = c.get('/api/learningcircles/?latitude=42.372028&longitude=-71.103081&distance=50') # cambridge coords result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) self.assertEqual(result["items"][0]['id'], 1) resp = c.get('/api/learningcircles/?latitude=43.466120&longitude=-80.525158&distance=50') # waterloo coords result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 0) def test_get_learning_circles_limit_offset(self): c = Client() resp = c.get('/api/learningcircles/?limit=1&offset=0') result = resp.json()
self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result["limit"], 1) self.assertEqual(result["offset"], 0) self.assertEqual(len(result["items"]), 1) resp = c.get('/api/learningcircles/?limit=1&offset=1') self.assertEqual(resp.status_code, 200) result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result["limit"], 1) self.assertEqual(result["offset"], 1) self.assertEqual(len(result["items"]), 1) def test_get_learning_circles_by_city(self): sg = StudyGroup.objects.get(pk=4) sg.city = 'Kitchener' sg.save() c = Client() resp = c.get('/api/learningcircles/?city=Kitchener') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) self.assertEqual(result["items"][0]['id'], 4) def test_get_learning_circles_by_team(self): sg_count = StudyGroup.objects.count() facilitator2 = User.objects.get(pk=2) self.assertEqual(facilitator2.teammembership_set.active().count(), 1) team = facilitator2.teammembership_set.active().first().team sgdata = dict( course=Course.objects.first(), created_by=facilitator2, description='blah', venue_name='ACME public library', venue_address='ACME rd 1', venue_details='venue_details', city='city', latitude=0, longitude=0, start_date=datetime.date(2010,1,1), end_date=datetime.date(2010,1,1) + datetime.timedelta(weeks=6), meeting_time=datetime.time(12,0), duration=90, timezone='GMT', facilitator_goal='the_facilitator_goal', facilitator_concerns='the_facilitators_concerns', draft=False, ) sg = StudyGroup(**sgdata) sg.save() sg_ids = [sg.pk] self.assertEqual(sg.team_id, team.id) meeting_dates = generate_all_meeting_dates( sg.start_date, sg.meeting_time, 6 ) generate_meetings_from_dates(sg, meeting_dates) sgdata['name'] = 'another lc' sg = StudyGroup(**sgdata) sg.save() sg_ids += [sg.id] meeting_dates = generate_all_meeting_dates( sg.start_date, sg.meeting_time, 6 ) generate_meetings_from_dates(sg, meeting_dates) self.assertEqual(sg.team_id, team.id) self.assertEqual(sg_count + 2, StudyGroup.objects.count()) self.assertEqual(StudyGroup.objects.active().filter(team=team).count(), 2) c = Client() resp = c.get(f'/api/learningcircles/?team_id={team.id}') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 2) self.assertIn(result["items"][0]['id'], sg_ids) self.assertIn(result["items"][1]['id'], sg_ids) # remove user from team facilitator2.teammembership_set.active().first().delete() # ensure learning circles are still returned for the team resp = c.get(f'/api/learningcircles/?team_id={team.id}') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 2) self.assertIn(result["items"][0]['id'], sg_ids) self.assertIn(result["items"][1]['id'], sg_ids) def test_get_learning_circle_by_id(self): c = Client() resp = c.get('/api/learningcircles/?id=3') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) self.assertEqual(result["items"][0]['id'], 3) def test_get_learning_circles_by_user(self): factory = RequestFactory() request = factory.get('/api/learningcircles/?user=true') user = self.facilitator sg = StudyGroup.objects.get(pk=2) sg.created_by = user sg.save() Facilitator.objects.create(study_group=sg, user=user) request.user = user resp = LearningCircleListView.as_view()(request) result = json.loads(resp.content) self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 1) 
self.assertEqual(result["items"][0]['id'], 2) def test_get_learning_circle_cities(self): c = Client() resp = c.get('/api/learningcircles/cities/') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result["items"][0], { 'label': 'Boston', 'value': 'Boston' }) self.assertEqual(result["items"][1], { 'label': 'Chicago', 'value': 'Chicago' }) self.assertEqual(result["items"][2], { 'label': 'Kansas City', 'value': 'Kansas City' }) self.assertEqual(result["items"][3], { 'label': 'Toronto', 'value': 'Toronto' }) @freeze_time("2019-05-31") def test_get_learning_circles_no_meetings(self): # end_date in the past sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,5,14) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # start_date in past, end_date in the future sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # start_date in future sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,6,2) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) c = Client() resp = c.get('/api/learningcircles/?signup=open') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 2) self.assertEqual(result["items"][0]["id"], 2) self.assertEqual(result["items"][0]["last_meeting_date"], '2019-06-13') self.assertEqual(result["items"][1]["id"], 3) self.assertEqual(result["items"][1]["last_meeting_date"], '2019-06-16') @freeze_time("2019-05-31") def test_get_learning_circles_status(self): # upcoming sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,6,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # in progress sg = StudyGroup.objects.get(pk=2) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() sg.refresh_from_db() generate_all_meetings(sg) # closed sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = False sg.save() sg.refresh_from_db() generate_all_meetings(sg) # completed sg = StudyGroup.objects.get(pk=4) sg.start_date = datetime.date(2019,5,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() c = Client() resp = c.get('/api/learningcircles/') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result["signup_open_count"], 2) self.assertEqual(result["signup_closed_count"], 2) self.assertEqual(result['items'][0]['status'], 'upcoming') self.assertEqual(result['items'][1]['status'], 'in_progress') self.assertEqual(result['items'][2]['status'], 'closed') self.assertEqual(result['items'][3]['status'], 'completed') @freeze_time("2019-05-31") def test_get_learning_circles_status_closed_completed(self): # closed upcoming sg = StudyGroup.objects.get(pk=1) sg.start_date = datetime.date(2019,6,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = False sg.save() sg.refresh_from_db() generate_all_meetings(sg) # closed in progress sg = StudyGroup.objects.get(pk=2) 
sg.start_date = datetime.date(2019,5,30) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = False sg.save() sg.refresh_from_db() generate_all_meetings(sg) # closed in past (should be completed status) sg = StudyGroup.objects.get(pk=3) sg.start_date = datetime.date(2019,4,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = False sg.save() sg.refresh_from_db() generate_all_meetings(sg) # open in past (should be completed status) sg = StudyGroup.objects.get(pk=4) sg.start_date = datetime.date(2019,4,1) sg.end_date = sg.start_date + datetime.timedelta(weeks=2) sg.signup_open = True sg.save() c = Client() resp = c.get('/api/learningcircles/') result = resp.json() self.assertEqual(resp.status_code, 200) self.assertEqual(result["count"], 4) self.assertEqual(result["signup_open_count"], 0) self.assertEqual(result["signup_closed_count"], 4) self.assertEqual(result['items'][0]['status'], 'closed') self.assertEqual(result['items'][1]['status'], 'closed') self.assertEqual(result['items'][2]['status'], 'completed') self.assertEqual(result['items'][3]['status'], 'completed')
{ "content_hash": "49d65f1e4236840d42314dd6fb97e200", "timestamp": "", "source": "github", "line_count": 1534, "max_line_length": 126, "avg_line_length": 40.428292046936114, "alnum_prop": 0.5659738458809681, "repo_name": "p2pu/learning-circles", "id": "86d8644f9a26fb88083247dbb42c8855696688a6", "size": "62074", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "studygroups/tests/api/test_learning_circle_api.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6537" }, { "name": "Dockerfile", "bytes": "2110" }, { "name": "HTML", "bytes": "222765" }, { "name": "JavaScript", "bytes": "202138" }, { "name": "Python", "bytes": "859945" }, { "name": "SCSS", "bytes": "122949" }, { "name": "Shell", "bytes": "808" } ], "symlink_target": "" }
"""Suite Required Suite: Events that every application should support Level 1, version 1 Generated from /Applications/Internet Explorer.app AETE/AEUT resource version 1/0, language 0, script 0 """ import aetools import MacOS _code = 'reqd' from StdSuites.Required_Suite import * class Required_Suite_Events(Required_Suite_Events): def open(self, _object, _attributes={}, **_arguments): """open: Open documents Required argument: undocumented, typecode 'alis' Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'aevt' _subcode = 'odoc' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def print_(self, _object, _attributes={}, **_arguments): """print: Print documents Required argument: undocumented, typecode 'alis' Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'aevt' _subcode = 'pdoc' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def quit(self, _no_object=None, _attributes={}, **_arguments): """quit: Quit application Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'aevt' _subcode = 'quit' if _arguments: raise TypeError, 'No optional args expected' if _no_object != None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def run(self, _no_object=None, _attributes={}, **_arguments): """run: Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'aevt' _subcode = 'oapp' if _arguments: raise TypeError, 'No optional args expected' if _no_object != None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] # # Indices of types declared in this module # _classdeclarations = { } _propdeclarations = { } _compdeclarations = { } _enumdeclarations = { }
{ "content_hash": "767e14d9e5279456f2b6651e75fa7c32", "timestamp": "", "source": "github", "line_count": 108, "max_line_length": 72, "avg_line_length": 30.73148148148148, "alnum_prop": 0.600180777342573, "repo_name": "MalloyPower/parsing-python", "id": "0a985d123bba45f3dd16f8113174eb651226ea8d", "size": "3319", "binary": false, "copies": "8", "ref": "refs/heads/master", "path": "front-end/testsuite-python-lib/Python-2.5/Lib/plat-mac/lib-scriptpackages/Explorer/Required_Suite.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "1963" }, { "name": "Lex", "bytes": "238458" }, { "name": "Makefile", "bytes": "4513" }, { "name": "OCaml", "bytes": "412695" }, { "name": "Python", "bytes": "17319" }, { "name": "Rascal", "bytes": "523063" }, { "name": "Yacc", "bytes": "429659" } ], "symlink_target": "" }
#

class _DefaultsStandalone(object):
    """Do not use this class outside of this module."""
    SPAWN_CLIENT_DAEMON = False
    SPAWN_SERVER_DAEMON = False
    SERVER_IPC_PORT = 54321
    CLIENT_IPC_PORT = 12345
    SERVER_ADDRESS = "127.0.0.1"
    SERVER_COAP_PORT = 5683
    CLIENT_COAP_PORT = 6000
    #DAEMON_PATH = "../.."
    SERVER_LOG_FILE = "awa_serverd.log"
    CLIENT_LOG_FILE = "awa_clientd.log"
    BOOTSTRAP_CONFIG_FILE = "../../localhost.bsc"
    CLIENT_ENDPOINT_NAME = "TestClient"


class _DefaultsSpawn(object):
    """Do not use this class outside of this module."""
    SPAWN_CLIENT_DAEMON = True
    SPAWN_SERVER_DAEMON = True
    SERVER_IPC_PORT = range(61731, 61741)
    CLIENT_IPC_PORT = range(61742, 61752)
    SERVER_ADDRESS = "127.0.0.1"
    SERVER_COAP_PORT = 6101
    CLIENT_COAP_PORT = 6102
    #DAEMON_PATH = "../.."
    SERVER_LOG_FILE = "awa_serverd.log"
    CLIENT_LOG_FILE = "awa_clientd.log"
    BOOTSTRAP_CONFIG_FILE = "../../localhost.bsc"
    CLIENT_ENDPOINT_NAME = "imgClient"


# select a default configuration
_DefaultConfigurationClass = _DefaultsSpawn

g_portIndex = 0


class Config(object):

    def __init__(self, configuration=_DefaultConfigurationClass):
        self._configurationClass = configuration
        # attempt to select from a list of available ports (avoids 'listener: failed to bind socket').
        # if this fails, the ports are single values
        try:
            global g_portIndex
            self._serverIpcPort = self._configurationClass.SERVER_IPC_PORT[g_portIndex % len(self._configurationClass.SERVER_IPC_PORT)]
            self._clientIpcPort = self._configurationClass.CLIENT_IPC_PORT[g_portIndex % len(self._configurationClass.CLIENT_IPC_PORT)]
            g_portIndex += 1
            #print "Config using rotating ports. Client IPC port: %d Server IPC port: %d" % (self._clientIpcPort, self._serverIpcPort)
            #pprint(self._configurationClass.SERVER_IPC_PORT)
        except TypeError:
            # server / client IPC ports are single values
            self._serverIpcPort = self._configurationClass.SERVER_IPC_PORT
            self._clientIpcPort = self._configurationClass.CLIENT_IPC_PORT
            self._bootstrapConfigFile = self._configurationClass.BOOTSTRAP_CONFIG_FILE
            self._objectDefinitionsFile = self._configurationClass.OBJECT_DEFINITIONS_FILE
            #print "Config using single ports. Client IPC port: %d Server IPC port: %d" % (self._clientIpcPort, self._serverIpcPort)

    @property
    def spawnServerDaemon(self):
        return self._configurationClass.SPAWN_SERVER_DAEMON

    @property
    def spawnClientDaemon(self):
        return self._configurationClass.SPAWN_CLIENT_DAEMON

    @property
    def serverIpcPort(self):
        return self._serverIpcPort

    @property
    def clientIpcPort(self):
        return self._clientIpcPort

    @property
    def serverAddress(self):
        return self._configurationClass.SERVER_ADDRESS

    @property
    def serverCoapPort(self):
        return self._configurationClass.SERVER_COAP_PORT

    @property
    def clientCoapPort(self):
        return self._configurationClass.CLIENT_COAP_PORT

    @property
    def serverLogFile(self):
        return self._configurationClass.SERVER_LOG_FILE

    @property
    def clientLogFile(self):
        return self._configurationClass.CLIENT_LOG_FILE

    @property
    def bootstrapConfigFile(self):
        return self._configurationClass.BOOTSTRAP_CONFIG_FILE

    @property
    def objectDefinitionsFile(self):
        return self._configurationClass.OBJECT_DEFINITIONS_FILE

    @property
    def clientEndpointName(self):
        return self._configurationClass.CLIENT_ENDPOINT_NAME

    @property
    def serverIpc(self):
        return "udp://127.0.0.1:" + str(self.serverIpcPort)

    @property
    def clientIpc(self):
        return "udp://127.0.0.1:" + str(self.clientIpcPort)

    @property
    def getServerHost(self):
        return "localhost"

    @property
    def getClientHost(self):
        return "localhost"
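# Illustrative usage sketch (added commentary, not part of the original module):
#   cfg = Config()                       # defaults to _DefaultsSpawn, rotating IPC ports
#   cfg.serverIpc                        # e.g. "udp://127.0.0.1:61731"
#   cfg = Config(_DefaultsStandalone)    # fixed single ports, no daemon spawning
# Note that neither defaults class shown here defines OBJECT_DEFINITIONS_FILE,
# so the objectDefinitionsFile property assumes the supplied configuration
# class provides that attribute.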
{ "content_hash": "55521ff46bbac091ee95f2f688da0970", "timestamp": "", "source": "github", "line_count": 134, "max_line_length": 135, "avg_line_length": 30.30597014925373, "alnum_prop": 0.6729869490273331, "repo_name": "FlowM2M/AwaLWM2M", "id": "a95a10c8e501484b29426f9d36f8a01947f024a6", "size": "5913", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "api/python/config.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "C", "bytes": "2626522" }, { "name": "C++", "bytes": "1894053" }, { "name": "CMake", "bytes": "44777" }, { "name": "Makefile", "bytes": "10067" }, { "name": "Objective-C", "bytes": "202295" }, { "name": "Python", "bytes": "161260" }, { "name": "Shell", "bytes": "9145" } ], "symlink_target": "" }
from django.db import models
from django.utils.translation import (
    gettext,
    gettext_lazy,
)


class Category(models.Model):
    title = models.CharField(max_length=255, verbose_name=gettext('Title'))

    def __str__(self):
        return 'Category: {}'.format(self.title)

    class Meta:
        ordering = ['title']
        verbose_name = gettext_lazy('Category')
        verbose_name_plural = gettext_lazy('Categories')


class Tag(models.Model):
    title = models.CharField(max_length=255, verbose_name=gettext('Title'))

    def __str__(self):
        return 'Tag: {}'.format(self.title)

    class Meta:
        ordering = ['title']
        verbose_name = gettext_lazy('Tag')
        verbose_name_plural = gettext_lazy('Tags')


class Task(models.Model):
    title = models.CharField(max_length=255, verbose_name=gettext('Title'))
    url = models.URLField(verbose_name=gettext('URL'), null=True, blank=True)
    is_important = models.BooleanField(default=False, verbose_name=gettext('Important?'))
    due_date = models.DateTimeField(null=True, blank=True, verbose_name=gettext('Due date'))
    category = models.ForeignKey(Category)
    parent = models.ForeignKey('self', null=True, blank=True, verbose_name=gettext('Parent task'))
    tags = models.ManyToManyField(Tag, blank=True)

    def __str__(self):
        return 'Task: {}'.format(self.title)

    class Meta:
        ordering = ['title']
        verbose_name = gettext_lazy('Task')
        verbose_name_plural = gettext_lazy('Tasks')
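# Compatibility note (added commentary, not part of the original module): the
# bare ForeignKey(...) declarations above only work on Django < 2.0; from
# Django 2.0 onwards an explicit on_delete argument is required, e.g.
# models.ForeignKey(Category, on_delete=models.CASCADE).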
{ "content_hash": "0b4875379b6bd24b9511e8693e4323e1", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 98, "avg_line_length": 32.170212765957444, "alnum_prop": 0.6547619047619048, "repo_name": "dArignac/scapegoat", "id": "40fc07c00285050f0085e354b812a4c04d834fe8", "size": "1512", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scapegoat/models.py", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "1155" }, { "name": "Makefile", "bytes": "1236" }, { "name": "Python", "bytes": "5921" } ], "symlink_target": "" }
import sys
import unittest

from mock import MagicMock

from libcloud.test import MockHttp
from libcloud.utils.py3 import httplib
from libcloud.dns.drivers.zonomi import ZonomiDNSDriver
from libcloud.test.secrets import DNS_PARAMS_ZONOMI
from libcloud.test.file_fixtures import DNSFileFixtures
from libcloud.dns.types import RecordType
from libcloud.dns.types import ZoneDoesNotExistError, ZoneAlreadyExistsError
from libcloud.dns.types import RecordDoesNotExistError
from libcloud.dns.types import RecordAlreadyExistsError
from libcloud.dns.base import Zone, Record


class ZonomiTests(unittest.TestCase):
    def setUp(self):
        ZonomiDNSDriver.connectionCls.conn_class = ZonomiMockHttp
        ZonomiMockHttp.type = None
        self.driver = ZonomiDNSDriver(*DNS_PARAMS_ZONOMI)
        self.test_zone = Zone(id='zone.com', domain='zone.com',
                              driver=self.driver, type='master', ttl=None,
                              extra={})
        self.test_record = Record(id='record.zone.com', name='record.zone.com',
                                  data='127.0.0.1', type='A',
                                  zone=self.test_zone, driver=self, extra={})

    def test_list_record_types(self):
        record_types = self.driver.list_record_types()
        self.assertEqual(len(record_types), 3)
        self.assertTrue(RecordType.A in record_types)
        self.assertTrue(RecordType.MX in record_types)
        self.assertTrue(RecordType.TXT in record_types)

    def test_list_zones_empty(self):
        ZonomiMockHttp.type = 'EMPTY_ZONES_LIST'
        zones = self.driver.list_zones()
        self.assertEqual(zones, [])

    def test_list_zones_success(self):
        zones = self.driver.list_zones()
        self.assertEqual(len(zones), 3)

        zone = zones[0]
        self.assertEqual(zone.id, 'thegamertest.com')
        self.assertEqual(zone.domain, 'thegamertest.com')
        self.assertEqual(zone.type, 'master')
        self.assertIsNone(zone.ttl)
        self.assertEqual(zone.driver, self.driver)

        second_zone = zones[1]
        self.assertEqual(second_zone.id, 'lonelygamer.com')
        self.assertEqual(second_zone.domain, 'lonelygamer.com')
        self.assertEqual(second_zone.type, 'master')
        self.assertIsNone(second_zone.ttl)
        self.assertEqual(second_zone.driver, self.driver)

        third_zone = zones[2]
        self.assertEqual(third_zone.id, 'gamertest.com')
        self.assertEqual(third_zone.domain, 'gamertest.com')
        self.assertEqual(third_zone.type, 'master')
        self.assertIsNone(third_zone.ttl)
        self.assertEqual(third_zone.driver, self.driver)

    def test_get_zone_GET_ZONE_DOES_NOT_EXIST(self):
        ZonomiMockHttp.type = 'GET_ZONE_DOES_NOT_EXIST'
        try:
            self.driver.get_zone('testzone.com')
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, 'testzone.com')
        else:
            self.fail('Exception was not thrown.')

    def test_get_zone_GET_ZONE_SUCCESS(self):
        ZonomiMockHttp.type = 'GET_ZONE_SUCCESS'
        zone = self.driver.get_zone(zone_id='gamertest.com')
        self.assertEqual(zone.id, 'gamertest.com')
        self.assertEqual(zone.domain, 'gamertest.com')
        self.assertEqual(zone.type, 'master')
        self.assertIsNone(zone.ttl)
        self.assertEqual(zone.driver, self.driver)

    def test_delete_zone_DELETE_ZONE_DOES_NOT_EXIST(self):
        ZonomiMockHttp.type = 'DELETE_ZONE_DOES_NOT_EXIST'
        try:
            self.driver.delete_zone(zone=self.test_zone)
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, self.test_zone.id)
        else:
            self.fail('Exception was not thrown.')

    def test_delete_zone_delete_zone_success(self):
        ZonomiMockHttp.type = 'DELETE_ZONE_SUCCESS'
        status = self.driver.delete_zone(zone=self.test_zone)
        self.assertEqual(status, True)

    def test_create_zone_already_exists(self):
        ZonomiMockHttp.type = 'CREATE_ZONE_ALREADY_EXISTS'
        try:
            self.driver.create_zone(domain='gamertest.com')
        except ZoneAlreadyExistsError as e:
            self.assertEqual(e.zone_id, 'gamertest.com')
        else:
            self.fail('Exception was not thrown.')

    def test_create_zone_create_zone_success(self):
        ZonomiMockHttp.type = 'CREATE_ZONE_SUCCESS'
        zone = self.driver.create_zone(domain='myzone.com')
        self.assertEqual(zone.id, 'myzone.com')
        self.assertEqual(zone.domain, 'myzone.com')
        self.assertEqual(zone.type, 'master')
        self.assertIsNone(zone.ttl)

    def test_list_records_empty_list(self):
        ZonomiMockHttp.type = 'LIST_RECORDS_EMPTY_LIST'
        pass

    def test_list_records_success(self):
        ZonomiMockHttp.type = 'LIST_RECORDS_SUCCESS'
        records = self.driver.list_records(zone=self.test_zone)
        self.assertEqual(len(records), 4)

        record = records[0]
        self.assertEqual(record.id, 'zone.com')
        self.assertEqual(record.type, 'SOA')
        self.assertEqual(record.data, 'ns1.zonomi.com. soacontact.zonomi.com. 13')
        self.assertEqual(record.name, 'zone.com')
        self.assertEqual(record.zone, self.test_zone)

        second_record = records[1]
        self.assertEqual(second_record.id, 'zone.com')
        self.assertEqual(second_record.name, 'zone.com')
        self.assertEqual(second_record.type, 'NS')
        self.assertEqual(second_record.data, 'ns1.zonomi.com')
        self.assertEqual(second_record.zone, self.test_zone)

        third_record = records[2]
        self.assertEqual(third_record.id, 'oltjano')
        self.assertEqual(third_record.name, 'oltjano')
        self.assertEqual(third_record.type, 'A')
        self.assertEqual(third_record.data, '127.0.0.1')
        self.assertEqual(third_record.zone, self.test_zone)

        fourth_record = records[3]
        self.assertEqual(fourth_record.id, 'zone.com')
        self.assertEqual(fourth_record.name, 'zone.com')
        self.assertEqual(fourth_record.type, 'NS')
        self.assertEqual(fourth_record.data, 'ns5.zonomi.com')
        self.assertEqual(fourth_record.zone, self.test_zone)

    def test_get_record_does_not_exist(self):
        ZonomiMockHttp.type = 'GET_RECORD_DOES_NOT_EXIST'
        zone = Zone(id='zone.com', domain='zone.com', type='master', ttl=None,
                    driver=self.driver)
        self.driver.get_zone = MagicMock(return_value=zone)
        record_id = 'nonexistent'
        try:
            self.driver.get_record(record_id=record_id, zone_id='zone.com')
        except RecordDoesNotExistError as e:
            self.assertEqual(e.record_id, record_id)
        else:
            self.fail('Exception was not thrown.')

    def test_get_record_success(self):
        ZonomiMockHttp.type = 'GET_RECORD_SUCCESS'
        zone = Zone(id='zone.com', domain='zone.com', type='master', ttl=None,
                    driver=self.driver)
        self.driver.get_zone = MagicMock(return_value=zone)
        record = self.driver.get_record(record_id='oltjano',
                                        zone_id='zone.com')
        self.assertEqual(record.id, 'oltjano')
        self.assertEqual(record.name, 'oltjano')
        self.assertEqual(record.type, 'A')
        self.assertEqual(record.data, '127.0.0.1')

    def test_delete_record_does_not_exist(self):
        ZonomiMockHttp.type = 'DELETE_RECORD_DOES_NOT_EXIST'
        record = self.test_record
        try:
            self.driver.delete_record(record=record)
        except RecordDoesNotExistError as e:
            self.assertEqual(e.record_id, record.id)
        else:
            self.fail('Exception was not thrown.')

    def test_delete_record_success(self):
        ZonomiMockHttp.type = 'DELETE_RECORD_SUCCESS'
        record = self.test_record
        status = self.driver.delete_record(record=record)
        self.assertEqual(status, True)

    def test_create_record_already_exists(self):
        zone = self.test_zone
        ZonomiMockHttp.type = 'CREATE_RECORD_ALREADY_EXISTS'
        try:
            self.driver.create_record(name='createrecord', type='A',
                                      data='127.0.0.1', zone=zone, extra={})
        except RecordAlreadyExistsError as e:
            self.assertEqual(e.record_id, 'createrecord')
        else:
            self.fail('Exception was not thrown.')

    def test_create_record_success(self):
        ZonomiMockHttp.type = 'CREATE_RECORD_SUCCESS'
        zone = self.test_zone
        record = self.driver.create_record(name='createrecord', zone=zone,
                                           type='A', data='127.0.0.1',
                                           extra={})
        self.assertEqual(record.id, 'createrecord')
        self.assertEqual(record.name, 'createrecord')
        self.assertEqual(record.type, 'A')
        self.assertEqual(record.data, '127.0.0.1')
        self.assertEqual(record.zone, zone)

    def test_convert_to_slave(self):
        zone = self.test_zone
        result = self.driver.ex_convert_to_secondary(zone, '1.2.3.4')
        self.assertTrue(result)

    def test_convert_to_slave_couldnt_convert(self):
        zone = self.test_zone
        ZonomiMockHttp.type = 'COULDNT_CONVERT'
        try:
            self.driver.ex_convert_to_secondary(zone, '1.2.3.4')
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, 'zone.com')
        else:
            self.fail('Exception was not thrown.')

    def test_convert_to_master(self):
        zone = self.test_zone
        result = self.driver.ex_convert_to_master(zone)
        self.assertTrue(result)

    def test_convert_to_master_couldnt_convert(self):
        zone = self.test_zone
        ZonomiMockHttp.type = 'COULDNT_CONVERT'
        try:
            self.driver.ex_convert_to_master(zone)
        except ZoneDoesNotExistError as e:
            self.assertEqual(e.zone_id, 'zone.com')
        else:
            self.fail('Exception was not thrown.')


class ZonomiMockHttp(MockHttp):
    fixtures = DNSFileFixtures('zonomi')

    def _app_dns_dyndns_jsp_EMPTY_ZONES_LIST(self, method, url, body, headers):
        body = self.fixtures.load('empty_zones_list.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp(self, method, url, body, headers):
        body = self.fixtures.load('list_zones.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_GET_ZONE_DOES_NOT_EXIST(self, method, url, body, headers):
        body = self.fixtures.load('list_zones.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_GET_ZONE_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('list_zones.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_DELETE_ZONE_DOES_NOT_EXIST(self, method, url, body, headers):
        body = self.fixtures.load('delete_zone_does_not_exist.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_DELETE_ZONE_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('delete_zone.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_addzone_jsp_CREATE_ZONE_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('create_zone.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_addzone_jsp_CREATE_ZONE_ALREADY_EXISTS(self, method, url, body, headers):
        body = self.fixtures.load('create_zone_already_exists.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_LIST_RECORDS_EMPTY_LIST(self, method, url, body, headers):
        body = self.fixtures.load('list_records_empty_list.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_LIST_RECORDS_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('list_records.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_DELETE_RECORD_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('delete_record.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_DELETE_RECORD_DOES_NOT_EXIST(self, method, url, body, headers):
        body = self.fixtures.load('delete_record_does_not_exist.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_CREATE_RECORD_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('create_record.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_CREATE_RECORD_ALREADY_EXISTS(self, method, url, body, headers):
        body = self.fixtures.load('create_record_already_exists.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_GET_RECORD_SUCCESS(self, method, url, body, headers):
        body = self.fixtures.load('list_records.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_dyndns_jsp_GET_RECORD_DOES_NOT_EXIST(self, method, url, body, headers):
        body = self.fixtures.load('list_records.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_converttosecondary_jsp(self, method, url, body, headers):
        body = self.fixtures.load('converted_to_slave.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_converttosecondary_jsp_COULDNT_CONVERT(self, method, url, body, headers):
        body = self.fixtures.load('couldnt_convert.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_converttomaster_jsp(self, method, url, body, headers):
        body = self.fixtures.load('converted_to_master.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _app_dns_converttomaster_jsp_COULDNT_CONVERT(self, method, url, body, headers):
        body = self.fixtures.load('couldnt_convert.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])


if __name__ == '__main__':
    sys.exit(unittest.main())
{ "content_hash": "8a1ec3af41ec7e95e3381c3479f707a1", "timestamp": "", "source": "github", "line_count": 361, "max_line_length": 79, "avg_line_length": 42.49307479224377, "alnum_prop": 0.6047588005215124, "repo_name": "ByteInternet/libcloud", "id": "230b1caba483a52a9ce724e68cdf8b96db31cfcb", "size": "16113", "binary": false, "copies": "3", "ref": "refs/heads/byte", "path": "libcloud/test/dns/test_zonomi.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "573" }, { "name": "HTML", "bytes": "2545" }, { "name": "PowerShell", "bytes": "410" }, { "name": "Python", "bytes": "7579600" }, { "name": "Shell", "bytes": "5936" } ], "symlink_target": "" }
import sys
import os
import traceback
from optparse import OptionParser, OptionGroup

# Path adjustment for non-dev deployments
local_libdir = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../lib')
if os.path.exists(local_libdir):
    sys.path.insert(0, local_libdir)

import tstracker.config.config_defaults as config_defaults
import terasaur.config.config_helper as config_helper
from tstracker.tstrackermq_cli import TerasaurTrackerMQCli
import terasaur.log.log_helper as log_helper

_LOGGER_NAME = 'tstrackermq'

def _get_option_parser():
    usage = """%prog [options] <command> <files>

Available commands:
    add     Add a torrent or add a seed bank to a torrent
    remove  Remove a torrent or remove a seed bank from a torrent
    list    List torrents or list seed banks for a given torrent
    stats   Stats functions: push stats to terasaur, capture data"""
    #update  Update information for a torrent

    parser = OptionParser(usage=usage, version='%prog 1.0', description='Terasaur tracker MQ CLI')
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="Enable verbose output")
    parser.add_option("--debug", action="store_true", dest="debug", default=False, help="Enable debug output and stacktraces")
    parser.add_option('--infohash', dest='info_hash', type='string', metavar="SHA1", help='Specify info hash', default=None)
    parser.add_option('--seedbank', dest='seedbank', type='string', metavar="IP_PORT", help='Seed bank ip address and port (addr:port)', default=None)
    parser.add_option("--usemq", action="store_true", dest="usemq", default=False, help="Use the MQ instead of direct-to-mongodb")

    stats_group = OptionGroup(parser, 'Stats options', '')
    stats_group.add_option("--full", action="store_true", dest="stats_full", default=False, help="Send updates for all torrents")
    stats_group.add_option("--incremental", action="store_true", dest="stats_incremental", default=False, help="Send updates for torrents changed since the last run")
    stats_group.add_option("--capture", action="store_true", dest="stats_capture", default=False, help="Take a stats snapshot")
    stats_group.add_option("--init", action="store_true", dest="stats_init", default=False, help="Initialize stats database")
    parser.add_option_group(stats_group)

    #update_group = OptionGroup(parser, 'Update options', '')
    #update_group.add_option('--published', dest='published', type='string', metavar="DATE", help='Published datetime (yyyy-mm-ddThh:mm:ss.mmmZ)', default=None)
    #parser.add_option_group(update_group)

    return parser

def _parse_args():
    parser = _get_option_parser()
    (options, args) = parser.parse_args()
    if len(args) == 0:
        parser.print_usage()
        options = None
        args = None
    return (parser, options, args)

def main():
    (parser, options, args) = _parse_args()
    if options is None:
        return

    # nasty hack
    # TODO: refactor log_helper and log_init modules
    log_helper._LOG_NAME = _LOGGER_NAME

    try:
        config_defaults.init()
        ch = config_helper.ConfigHelper()
        config = ch.get_config()
        cli = TerasaurTrackerMQCli(config)
        cli.execute(options, args)
    except Exception, e:
        print 'ERROR: ' + str(e)
        if options.debug is True:
            traceback.print_exc()

if __name__ == '__main__':
    main()
{ "content_hash": "26296c75b8d508630a57b932a7fc3608", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 166, "avg_line_length": 41.68604651162791, "alnum_prop": 0.6552301255230125, "repo_name": "terasaur/tstracker", "id": "3c127b52712b84b9b4a59ab8f1dd544659adac2f", "size": "4190", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "mqclient/src/mqcli.py", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "227474" }, { "name": "C++", "bytes": "158712" }, { "name": "Python", "bytes": "52835" }, { "name": "Shell", "bytes": "1926" } ], "symlink_target": "" }
import sys import dns import dns.exception import dns.name import dns.query import dns.resolver import re from sentry_sdk import capture_exception from lemur.extensions import metrics class DNSError(Exception): """Base class for DNS Exceptions.""" pass class BadDomainError(DNSError): """Error for when a Bad Domain Name is given.""" def __init__(self, message): self.message = message class DNSResolveError(DNSError): """Error for DNS Resolution Errors.""" def __init__(self, message): self.message = message def is_valid_domain(domain): """Checks if a domain is syntactically valid and returns a bool""" if domain[-1] == ".": domain = domain[:-1] if len(domain) > 253: return False fqdn_re = re.compile("(?=^.{1,63}$)(^(?:[a-z0-9_](?:-*[a-z0-9_])+)$|^[a-z0-9]$)", re.IGNORECASE) return all(fqdn_re.match(d) for d in domain.split(".")) def get_authoritative_nameserver(domain): """Get the authoritative nameservers for the given domain""" if not is_valid_domain(domain): raise BadDomainError(f"{domain} is not a valid FQDN") n = dns.name.from_text(domain) depth = 2 default = dns.resolver.get_default_resolver() nameserver = default.nameservers[0] last = False while not last: s = n.split(depth) last = s[0].to_unicode() == u"@" sub = s[1] query = dns.message.make_query(sub, dns.rdatatype.NS) response = dns.query.udp(query, nameserver) rcode = response.rcode() if rcode != dns.rcode.NOERROR: function = sys._getframe().f_code.co_name metrics.send(f"{function}.error", "counter", 1) if rcode == dns.rcode.NXDOMAIN: raise DNSResolveError(f"{sub} does not exist.") else: raise DNSResolveError(f"Error: {dns.rcode.to_text(rcode)}") if len(response.authority) > 0: rrset = response.authority[0] else: rrset = response.answer[0] rr = rrset[0] if rr.rdtype != dns.rdatatype.SOA: authority = rr.target nameserver = default.query(authority).rrset[0].to_text() depth += 1 return nameserver def get_dns_records(domain, rdtype, nameserver): """Retrieves the DNS records matching the name and type and returns a list of records""" records = [] try: dns_resolver = dns.resolver.Resolver() dns_resolver.nameservers = [nameserver] dns_response = dns_resolver.query(domain, rdtype) for rdata in dns_response: for record in rdata.strings: records.append(record.decode("utf-8")) except dns.exception.DNSException: capture_exception() function = sys._getframe().f_code.co_name metrics.send(f"{function}.fail", "counter", 1) return records
{ "content_hash": "60e47515a408eadef61de985ffb2e5e1", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 100, "avg_line_length": 28.663366336633665, "alnum_prop": 0.6107081174438688, "repo_name": "Netflix/lemur", "id": "62a988b80474849431ed16060de397d0550e9c01", "size": "2895", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "lemur/dns_providers/util.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "2728" }, { "name": "Dockerfile", "bytes": "2597" }, { "name": "HTML", "bytes": "314713" }, { "name": "JavaScript", "bytes": "15496" }, { "name": "Makefile", "bytes": "3791" }, { "name": "Mako", "bytes": "412" }, { "name": "Python", "bytes": "1530505" }, { "name": "Shell", "bytes": "2339" } ], "symlink_target": "" }
import os from .utils import make_dir, INSTANCE_FOLDER_PATH class BaseConfig(object): PROJECT = 'server' NAME = PROJECT # Get app root path, also can use flask.root_path. # ../../config.py PROJECT_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) DEBUG = False TESTING = False # http://flask.pocoo.org/docs/quickstart/#sessions SECRET_KEY = 'secret key' LOG_FOLDER = os.path.join(INSTANCE_FOLDER_PATH, 'logs') make_dir(LOG_FOLDER) ADMINS = ['robin.andeer@scilifelab.se'] class DefaultConfig(BaseConfig): NAME = 'scout' DEBUG = True # Flask-cache: http://pythonhosted.org/Flask-Cache/ CACHE_TYPE = 'simple' CACHE_DEFAULT_TIMEOUT = 60 TORNADO_PORT = 8082 # Flask-mail: http://pythonhosted.org/flask-mail/ # https://bitbucket.org/danjac/flask-mail/issue/3/problem-with-gmails-smtp-server MAIL_DEBUG = DEBUG MAIL_SERVER = 'smtp.gmail.com' MAIL_PORT = 587 MAIL_USE_TLS = True MAIL_USE_SSL = False # Put real MAIL_USERNAME and MAIL_PASSWORD under instance folder MAIL_USERNAME = 'yourmail@gmail.com' MAIL_PASSWORD = 'yourpass' MAIL_DEFAULT_SENDER = MAIL_USERNAME class TestConfig(BaseConfig): TESTING = True
{ "content_hash": "2dc13f0fbcf2dbe5eaf70e56fc78930a", "timestamp": "", "source": "github", "line_count": 54, "max_line_length": 83, "avg_line_length": 22.185185185185187, "alnum_prop": 0.6986644407345576, "repo_name": "robinandeer/scout", "id": "72ec75b3352b84c2b62761721bc72e751f6a86e7", "size": "1222", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "server/config.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "26845" }, { "name": "CoffeeScript", "bytes": "58426" }, { "name": "Python", "bytes": "45820" }, { "name": "Ruby", "bytes": "1968" }, { "name": "Shell", "bytes": "6699" } ], "symlink_target": "" }
""" `paddle.v2.layer` is a part of model config packages in paddle.v2. In API v2, we want to make Paddle a plain Python package. The model config package defined the way how to configure a neural network topology in Paddle Python code. The primary usage shows below. .. code-block:: python import paddle.v2 as paddle img = paddle.layer.data(name='img', type=paddle.data_type.dense_vector(784)) hidden = paddle.layer.fc(input=img, size=200) prediction = paddle.layer.fc(input=hidden, size=10, act=paddle.activation.Softmax()) # use prediction instance where needed. parameters = paddle.parameters.create(cost) """ import collections import inspect from config_base import Layer, __convert_to_v2__ import paddle.trainer_config_helpers as conf_helps from paddle.trainer_config_helpers.config_parser_utils import \ parse_network_config as __parse__ from paddle.trainer_config_helpers.default_decorators import wrap_act_default from paddle.trainer_config_helpers.default_decorators import \ wrap_bias_attr_default from paddle.trainer_config_helpers.default_decorators import wrap_name_default from paddle.trainer_config_helpers.layers import layer_support from paddle.trainer.config_parser import \ RecurrentLayerGroupWithoutOutLinksBegin, RecurrentLayerGroupSetOutLink, \ RecurrentLayerGroupEnd, model_type import activation import re import data_type __all__ = ['parse_network', 'data'] def parse_network(*outputs): """ Parse all output layers and then generate a ModelConfig object. .. note:: This function is used internally in paddle.v2 module. User should never invoke this method. :param outputs: Output layers. :type outputs: Layer :return: A ModelConfig object instance. :rtype: ModelConfig """ def __real_func__(): """ __real_func__ is the function that config_parser.parse invoked. It is the plain old paddle configuration function. """ context = dict() real_output = [each.to_proto(context=context) for each in outputs] conf_helps.outputs(real_output) return __parse__(__real_func__) """ Some layer may need some special config, and can not use __convert_to_v2__ to convert. So we also need to implement some special LayerV2. 
""" class DataLayerV2(Layer): METHOD_NAME = 'data_layer' def __init__(self, name, type, **kwargs): assert isinstance(type, data_type.InputType) self.type = type self.__method_name__ = 'data_layer' self.__kwargs__ = kwargs super(DataLayerV2, self).__init__(name=name, parent_layers=dict()) def to_proto_impl(self, **kwargs): args = dict() args['size'] = self.type.dim for each in kwargs: args[each] = kwargs[each] for each in self.__kwargs__: args[each] = self.__kwargs__[each] return getattr(conf_helps, self.__method_name__)(name=self.name, **args) def __map_docstr__(doc): doc = re.sub(r'(data = [^\)]+)\).*', "data = paddle.layer.data(name=\"input\", " "type=paddle.data_type.dense_vector(1000))", doc) doc = re.sub(r':param size:.*', ':param type: Data type of this data layer', doc) doc = re.sub(r':type size:.*', ":type size: paddle.v2.data_type.InputType", doc) return doc class WithExtraParent(Layer): def extra_parent(self): return self.__extra_parent__ def __init__(self, name=None, parent_layers=None): self.__extra_parent__ = [] super(WithExtraParent, self).__init__( name=name, parent_layers=parent_layers) def append_extra_parent(self, parent): self.__extra_parent__.append(parent) def to_proto(self, context): """ function to set proto attribute """ kwargs = dict() for p in self.__extra_parent__: p.to_proto(context=context) for layer_name in self.__parent_layers__: if not isinstance(self.__parent_layers__[layer_name], collections.Sequence): v1_layer = self.__parent_layers__[layer_name].to_proto( context=context) else: v1_layer = map(lambda x: x.to_proto(context=context), self.__parent_layers__[layer_name]) kwargs[layer_name] = v1_layer if self.context_name() is None: return self.to_proto_impl(context=context, **kwargs) elif self.context_name() not in context: context[self.context_name()] = self.to_proto_impl( context=context, **kwargs) if self.use_context_name(): return context[self.context_name()] else: return context[self.name] class MemoryV2(WithExtraParent): def __init__(self, name, **kwargs): self.name = name super(MemoryV2, self).__init__(name=name, parent_layers=dict()) self.__kwargs__ = kwargs self.__boot_layer_name__ = None if 'boot_layer' in kwargs: begin_of_current_rnn = [] # TODO(yuyang18): Fix inspect, it could be wrong when user invoke a # function inside step. 
st = inspect.stack() for i in xrange(len(st)): locs = inspect.stack()[i][0].f_locals keys = locs.keys() for key in keys: val = locs[key] if isinstance(val, RecurrentLayerInput): begin_of_current_rnn.append(val) elif isinstance(val, collections.Sequence): for v in val: if isinstance(v, RecurrentLayerInput): begin_of_current_rnn.append(v) if begin_of_current_rnn: break assert begin_of_current_rnn is not None for extra in begin_of_current_rnn: self.append_extra_parent(extra) assert isinstance(extra, WithExtraParent) extra.append_extra_parent(kwargs['boot_layer']) self.__boot_layer_name__ = kwargs['boot_layer'].name def to_proto_impl(self, context, **kwargs): args = dict() for each in kwargs: args[each] = kwargs[each] for each in self.__kwargs__: args[each] = self.__kwargs__[each] if self.__boot_layer_name__ is not None: args['boot_layer'] = context[self.__boot_layer_name__] size = args.get('size', None) if size is not None: if callable(size): real_size = size() else: real_size = size args['size'] = real_size return conf_helps.memory(name=self.name, **args) def context_name(self): return self.name + "#memory" def use_context_name(self): """ memory layer will have the same name with some layer :return: """ return True class LayerOutputV2(Layer): """ LayerOutputV2 is used to store the result of LayerOutput in v1 api. It will not store it's parents because layer_output has been parsed already. """ def __init__(self, layer_output): assert isinstance(layer_output, conf_helps.LayerOutput) self.layer_output = layer_output super(LayerOutputV2, self).__init__( name=layer_output.name, parent_layers=dict()) def to_proto_impl(self): return self.layer_output class StaticInputV2(object): def __init__(self, input, is_seq=False, size=None): assert isinstance(input, LayerV2) self.name = input.name self.input = input self.is_seq = is_seq self.size = size # TODO(add size check) # assert input.size is not None or size is not None class MixedLayerV2(Layer): """ This class is use to support `with` grammar. If not, the following code could convert mixed_layer simply. 
mixed = __convert_to_v2__( 'mixed_layer', name_prefix='mixed', parent_names=['input']) """ class AddToSealedMixedLayerExceptionV2(Exception): pass def __init__(self, size=0, input=None, name=None, act=None, bias_attr=None, layer_attr=None): self.__method_name__ = 'mixed_layer' self.finalized = False self.__inputs__ = [] if input is not None: self.__inputs__ = input other_kwargs = dict() other_kwargs['name'] = name other_kwargs['size'] = size other_kwargs['act'] = act other_kwargs['bias_attr'] = bias_attr other_kwargs['layer_attr'] = layer_attr parent_layers = {"input": self.__inputs__} super(MixedLayerV2, self).__init__(name, parent_layers) self.__other_kwargs__ = other_kwargs def __iadd__(self, other): if not self.finalized: self.__inputs__.append(other) return self else: raise MixedLayerV2.AddToSealedMixedLayerExceptionV2() def __enter__(self): assert len(self.__inputs__) == 0 return self def __exit__(self, *args, **kwargs): self.finalized = True def to_proto_impl(self, **kwargs): args = dict() for each in kwargs: args[each] = kwargs[each] for each in self.__other_kwargs__: args[each] = self.__other_kwargs__[each] size = args.get('size', None) if size is not None: if callable(size): real_size = size() else: real_size = size args['size'] = real_size return getattr(conf_helps, self.__method_name__)(**args) @wrap_name_default("mixed") @wrap_act_default(act=activation.Linear()) @wrap_bias_attr_default(has_bias=False) @layer_support(conf_helps.layers.ERROR_CLIPPING, conf_helps.layers.DROPOUT) def mixed(size=0, name=None, input=None, act=None, bias_attr=False, layer_attr=None): return MixedLayerV2(size, input, name, act, bias_attr, layer_attr) class RecurrentLayerInput(WithExtraParent): def __init__(self, recurrent_name, index, parent_layers): assert len(parent_layers) == 1 self.__parents__ = parent_layers.values()[0] super(RecurrentLayerInput, self).__init__( name=self.__parents__[index].name, parent_layers=parent_layers) self.__recurrent_name__ = recurrent_name def context_name(self): return self.__recurrent_name__ + ".begin" def to_proto_impl(self, context, **kwargs): model_type('recurrent_nn') RecurrentLayerGroupWithoutOutLinksBegin( name=self.__recurrent_name__, in_links=map(lambda x: x.name, self.__parents__)) return self class RecurrentLayerOutput(Layer): def __init__(self, recurrent_name, index, parent_layers): assert len(parent_layers) == 1 self.__parents__ = parent_layers.values()[0] super(RecurrentLayerOutput, self).__init__( name=self.__parents__[index].name, parent_layers=parent_layers) self.__recurrent_name__ = recurrent_name def context_name(self): return self.__recurrent_name__ + ".end" def to_proto_impl(self, **kwargs): for l in self.__parents__: RecurrentLayerGroupSetOutLink(l.name) RecurrentLayerGroupEnd(name=self.__recurrent_name__) LayerV2 = Layer data = DataLayerV2 data.__name__ = 'data' AggregateLevel = conf_helps.layers.AggregateLevel ExpandLevel = conf_helps.layers.ExpandLevel memory = MemoryV2 def __layer_name_mapping__(inname): if inname in ['data_layer', 'memory', 'mixed_layer', 'recurrent_group']: # Do Not handle these layers return elif inname == 'maxid_layer': return 'max_id' elif inname.endswith('memory') or inname.endswith( '_seq') or inname.endswith('_sim') or inname == 'hsigmoid': return inname elif inname in [ 'cross_entropy', 'multi_binary_label_cross_entropy', 'cross_entropy_with_selfnorm' ]: return inname + "_cost" elif inname.endswith('_cost'): return inname elif inname.endswith("_layer"): return inname[:-len("_layer")] def 
__layer_name_mapping_parent_names__(inname): all_args = getattr(conf_helps, inname).argspec.args return filter( lambda x: x in ['input1', 'input2', 'label', 'input', 'a', 'b', 'expand_as', 'weights', 'vectors', 'weight', 'score', 'left', 'right', 'output_mem'], all_args) def __convert_layer__(_new_name_, _old_name_, _parent_names_): global __all__ __all__.append(_new_name_) globals()[new_name] = __convert_to_v2__(_old_name_, _parent_names_) globals()[new_name].__name__ = new_name for each_layer_name in dir(conf_helps): new_name = __layer_name_mapping__(each_layer_name) if new_name is not None: parent_names = __layer_name_mapping_parent_names__(each_layer_name) assert len(parent_names) != 0, each_layer_name __convert_layer__(new_name, each_layer_name, parent_names) del parent_names del new_name del each_layer_name @wrap_name_default() def recurrent_group(step, input, name=None): if not isinstance(input, collections.Sequence): input = [input] non_static_inputs = filter(lambda x: not isinstance(x, StaticInputV2), input) actual_input = [ RecurrentLayerInput( recurrent_name=name, index=i, parent_layers={'recurrent_inputs': non_static_inputs}) for i in xrange(len(non_static_inputs)) ] def __real_step__(*args): rnn_input = list(args) static_inputs = filter(lambda x: isinstance(x, StaticInputV2), input) for static_input in static_inputs: mem_name = "__%s_memory__" % static_input.input.name mem = memory( name=mem_name, is_seq=static_input.is_seq, size=static_input.input.calculate_size, boot_layer=static_input.input) with mixed( name=mem_name, size=static_input.input.calculate_size, act=activation.Identity()) as mix: mix += identity_projection(input=mem) rnn_input.insert(input.index(static_input), mix) return step(*rnn_input) actual_output = __real_step__(*actual_input) if not isinstance(actual_output, collections.Sequence): actual_output = [actual_output] retv = [ RecurrentLayerOutput( recurrent_name=name, index=i, parent_layers={'recurrent_outputs': actual_output}) for i in xrange(len(actual_output)) ] if len(retv) == 1: return retv[0] else: return retv __projection_names__ = filter(lambda x: x.endswith('_projection'), dir(conf_helps)) __all__ += __projection_names__ __operator_names__ = filter(lambda x: x.endswith('_operator'), dir(conf_helps)) __all__ += __operator_names__ # convert projection for prj in __projection_names__: globals()[prj] = __convert_to_v2__( prj, parent_names=['input'], is_default_name=False) globals()[prj].__name__ = prj # convert operator operator_list = [ # [V1_method_name, parent_names], ['dotmul_operator', ['a', 'b']], ['conv_operator', ['img', 'filter']] ] for op in operator_list: globals()[op[0]] = __convert_to_v2__( op[0], parent_names=op[1], is_default_name=False) globals()[op[0]].__name__ = op[0]
{ "content_hash": "18eb01de56786a47658491e206320304", "timestamp": "", "source": "github", "line_count": 485, "max_line_length": 86, "avg_line_length": 33.02061855670103, "alnum_prop": 0.5815797689665938, "repo_name": "gangliao/Paddle", "id": "1e4efedde363f20fde168941adcb6e8a594b533a", "size": "16624", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "python/paddle/v2/layer.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "195950" }, { "name": "C++", "bytes": "3216654" }, { "name": "CMake", "bytes": "120429" }, { "name": "CSS", "bytes": "21730" }, { "name": "Cuda", "bytes": "459718" }, { "name": "HTML", "bytes": "9018" }, { "name": "JavaScript", "bytes": "1025" }, { "name": "Perl", "bytes": "11452" }, { "name": "Protocol Buffer", "bytes": "43517" }, { "name": "Python", "bytes": "989667" }, { "name": "Shell", "bytes": "108541" } ], "symlink_target": "" }
from django.core.cache.backends.dummy import DummyCache class FakeRedisCache(DummyCache): # pragma: no cover def __init__(self, *args, **kwargs): DummyCache.__init__(self, *args, **kwargs) self.setex = None self.lrem = None self.zadd = None self.pipeline = None self.ttl = None @property def client(self): return self def close(self, **kwargs): pass
{ "content_hash": "a21cfb94e09edc1e198c0a90f037c576", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 55, "avg_line_length": 22.94736842105263, "alnum_prop": 0.5871559633027523, "repo_name": "Benoss/django-cookiecutter", "id": "5c05fac42f8f0fb38488d52f47157813cb6f2722", "size": "460", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/contrib/fake_redis.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "63" }, { "name": "HTML", "bytes": "4539" }, { "name": "JavaScript", "bytes": "2147" }, { "name": "Python", "bytes": "24711" }, { "name": "Shell", "bytes": "4879" } ], "symlink_target": "" }
import string template = string.Template(""" # Show what aspects might need attention cobbler check # Act on the 'check' above, then re-check until satisfactory. # (Details beyond the scope of this particular example.) # Import a client OS from a DVD. This automatically sets up a "distro" and names it. # (See below ISO file variant.) cobbler import --path=$__contrail_distro_path__ --name=$__contrail_distro_prefix__ --arch=x86_64 # Create a profile (e.g. "rhel5_workstation") and associate it with that distro cobbler profile add --name=$__contrail_profile_name__ --distro=${__contrail_distro_prefix__}-x86_64 # Set up a kickstart file. # (Details beyond the scope of this particular example.) # Associate a kickstart file with this profile cobbler profile edit --name=$__contrail_profile_name__ --kickstart=$__contrail_kickstart_config__ # Register a client machine (e.g. "workstation1") and its network details # and associate it with a profile $__contrail_cobbler_system_add_commands__ # Get a detailed report of everything in cobbler cobbler report # Get cobbler to act on all the above (set up DHCP, etc.) cobbler sync """)
{ "content_hash": "e7dec21f426da6d7bd09f8739e0b77c6", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 99, "avg_line_length": 35.6875, "alnum_prop": 0.7390542907180385, "repo_name": "Juniper/contrail-provisioning", "id": "2c776f0f10286018d4640584c00f1db8c5535e5c", "size": "1142", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "contrail_provisioning/common/templates/cobbler_commands_template.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "483390" }, { "name": "Shell", "bytes": "304085" } ], "symlink_target": "" }
""" Created on Sat Jan 24 15:20:47 2015 @author: rlabbe """ import numpy as np import matplotlib.pyplot as plt try: data except: cols = ('time','millis','ax','ay','az','rollrate','pitchrate','yawrate', 'roll','pitch','yaw','speed','course','latitude','longitude', 'altitude','pdop','hdop','vdop','epe') data = np.genfromtxt('2014-03-26-000-Data.csv', delimiter=',', names=True, usecols=cols).view(np.recarray) plt.subplot(311) plt.plot(data.ax) plt.subplot(312) plt.plot(data.speed) plt.subplot(313) plt.plot(data.course) plt.tight_layout() plt.figure() plt.subplot(311) plt.plot(data.pitch) plt.subplot(312) plt.plot(data.roll) plt.subplot(313) plt.plot(data.yaw) plt.figure() plt.plot(data.longitude, data.latitude)
{ "content_hash": "0f8cf63f35525b19dce352b51ce42799", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 76, "avg_line_length": 21.216216216216218, "alnum_prop": 0.6509554140127388, "repo_name": "zaqwes8811/micro-apps", "id": "5353759302bd04cbb3d55a4e8c45511607638525", "size": "809", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "self_driving/deps/Kalman_and_Bayesian_Filters_in_Python_master/experiments/balzer.py", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "309556" }, { "name": "Assembly", "bytes": "570069" }, { "name": "Batchfile", "bytes": "56007" }, { "name": "C", "bytes": "53062" }, { "name": "C#", "bytes": "32208" }, { "name": "C++", "bytes": "1108629" }, { "name": "CMake", "bytes": "23718" }, { "name": "CSS", "bytes": "186903" }, { "name": "Cuda", "bytes": "9680" }, { "name": "Dart", "bytes": "1158" }, { "name": "Dockerfile", "bytes": "20181" }, { "name": "Go", "bytes": "6640" }, { "name": "HTML", "bytes": "2215958" }, { "name": "Haskell", "bytes": "383" }, { "name": "Java", "bytes": "140401" }, { "name": "JavaScript", "bytes": "714877" }, { "name": "Jupyter Notebook", "bytes": "25399728" }, { "name": "Kotlin", "bytes": "713" }, { "name": "Lua", "bytes": "2253" }, { "name": "MATLAB", "bytes": "103" }, { "name": "Makefile", "bytes": "33566" }, { "name": "Mako", "bytes": "824" }, { "name": "NSIS", "bytes": "7481" }, { "name": "PHP", "bytes": "59915" }, { "name": "Pascal", "bytes": "2492" }, { "name": "Pawn", "bytes": "3337" }, { "name": "Python", "bytes": "1836093" }, { "name": "QML", "bytes": "58517" }, { "name": "QMake", "bytes": "4042" }, { "name": "R", "bytes": "13753" }, { "name": "Ruby", "bytes": "522" }, { "name": "Rust", "bytes": "210" }, { "name": "Scheme", "bytes": "113588" }, { "name": "Scilab", "bytes": "1348" }, { "name": "Shell", "bytes": "16112" }, { "name": "SourcePawn", "bytes": "3316" }, { "name": "VBScript", "bytes": "9376" }, { "name": "XSLT", "bytes": "24926" } ], "symlink_target": "" }
"""try to find more bugs in the code using astroid inference capabilities """ import re import shlex import astroid from astroid import InferenceError, NotFoundError, YES, Instance from astroid.bases import BUILTINS from pylint.interfaces import IAstroidChecker, INFERENCE, INFERENCE_FAILURE from pylint.checkers import BaseChecker from pylint.checkers.utils import ( safe_infer, is_super, check_messages, decorated_with_property) MSGS = { 'E1101': ('%s %r has no %r member', 'no-member', 'Used when a variable is accessed for an unexistent member.', {'old_names': [('E1103', 'maybe-no-member')]}), 'E1102': ('%s is not callable', 'not-callable', 'Used when an object being called has been inferred to a non \ callable object'), 'E1111': ('Assigning to function call which doesn\'t return', 'assignment-from-no-return', 'Used when an assignment is done on a function call but the \ inferred function doesn\'t return anything.'), 'W1111': ('Assigning to function call which only returns None', 'assignment-from-none', 'Used when an assignment is done on a function call but the \ inferred function returns nothing but None.'), 'E1120': ('No value for argument %s in %s call', 'no-value-for-parameter', 'Used when a function call passes too few arguments.'), 'E1121': ('Too many positional arguments for %s call', 'too-many-function-args', 'Used when a function call passes too many positional \ arguments.'), 'E1123': ('Unexpected keyword argument %r in %s call', 'unexpected-keyword-arg', 'Used when a function call passes a keyword argument that \ doesn\'t correspond to one of the function\'s parameter names.'), 'E1124': ('Argument %r passed by position and keyword in %s call', 'redundant-keyword-arg', 'Used when a function call would result in assigning multiple \ values to a function parameter, one value from a positional \ argument and one from a keyword argument.'), 'E1125': ('Missing mandatory keyword argument %r in %s call', 'missing-kwoa', ('Used when a function call does not pass a mandatory' ' keyword-only argument.'), {'minversion': (3, 0)}), 'E1126': ('Sequence index is not an int, slice, or instance with __index__', 'invalid-sequence-index', 'Used when a sequence type is indexed with an invalid type. ' 'Valid types are ints, slices, and objects with an __index__ ' 'method.'), 'E1127': ('Slice index is not an int, None, or instance with __index__', 'invalid-slice-index', 'Used when a slice index is not an integer, None, or an object \ with an __index__ method.'), } # builtin sequence types in Python 2 and 3. SEQUENCE_TYPES = set(['str', 'unicode', 'list', 'tuple', 'bytearray', 'xrange', 'range', 'bytes', 'memoryview']) def _determine_callable(callable_obj): # Ordering is important, since BoundMethod is a subclass of UnboundMethod, # and Function inherits Lambda. if isinstance(callable_obj, astroid.BoundMethod): # Bound methods have an extra implicit 'self' argument. return callable_obj, 1, callable_obj.type elif isinstance(callable_obj, astroid.UnboundMethod): return callable_obj, 0, 'unbound method' elif isinstance(callable_obj, astroid.Function): return callable_obj, 0, callable_obj.type elif isinstance(callable_obj, astroid.Lambda): return callable_obj, 0, 'lambda' elif isinstance(callable_obj, astroid.Class): # Class instantiation, lookup __new__ instead. # If we only find object.__new__, we can safely check __init__ # instead. try: # Use the last definition of __new__. 
            new = callable_obj.local_attr('__new__')[-1]
        except astroid.NotFoundError:
            new = None
        if not new or new.parent.scope().name == 'object':
            try:
                # Use the last definition of __init__.
                callable_obj = callable_obj.local_attr('__init__')[-1]
            except astroid.NotFoundError:
                # do nothing, covered by no-init.
                raise ValueError
        else:
            callable_obj = new

        if not isinstance(callable_obj, astroid.Function):
            raise ValueError
        # both have an extra implicit 'cls'/'self' argument.
        return callable_obj, 1, 'constructor'
    else:
        raise ValueError


class TypeChecker(BaseChecker):
    """try to find bugs in the code using type inference
    """

    __implements__ = (IAstroidChecker,)

    # configuration section name
    name = 'typecheck'
    # messages
    msgs = MSGS
    priority = -1
    # configuration options
    options = (('ignore-mixin-members',
                {'default' : True, 'type' : 'yn', 'metavar': '<y_or_n>',
                 'help' : 'Tells whether missing members accessed in mixin \
class should be ignored. A mixin class is detected if its name ends with \
"mixin" (case insensitive).'}
               ),
               ('ignored-modules',
                {'default': (),
                 'type': 'csv',
                 'metavar': '<module names>',
                 'help': 'List of module names for which member attributes \
should not be checked (useful for modules/projects where namespaces are \
manipulated during runtime and thus existing member attributes cannot be \
deduced by static analysis)'},
               ),
               ('ignored-classes',
                {'default' : ('SQLObject',),
                 'type' : 'csv',
                 'metavar' : '<members names>',
                 'help' : 'List of classes names for which member attributes \
should not be checked (useful for classes with attributes dynamically set).'}
               ),

               ('zope',
                {'default' : False, 'type' : 'yn', 'metavar': '<y_or_n>',
                 'help' : 'When zope mode is activated, add a predefined set \
of Zope acquired attributes to generated-members.'}
               ),
               ('generated-members',
                {'default' : ('REQUEST', 'acl_users', 'aq_parent'),
                 'type' : 'string',
                 'metavar' : '<members names>',
                 'help' : 'List of members which are set dynamically and \
missed by pylint inference system, and so shouldn\'t trigger E0201 when \
accessed. Python regular expressions are accepted.'}
               ),
              )

    def open(self):
        # do this in open since config not fully initialized in __init__
        self.generated_members = list(self.config.generated_members)
        if self.config.zope:
            self.generated_members.extend(('REQUEST', 'acl_users', 'aq_parent'))

    def visit_assattr(self, node):
        if isinstance(node.ass_type(), astroid.AugAssign):
            self.visit_getattr(node)

    def visit_delattr(self, node):
        self.visit_getattr(node)

    @check_messages('no-member')
    def visit_getattr(self, node):
        """check that the accessed attribute exists

        to avoid too many false positives for now, we'll consider the code as
        correct if a single one of the inferred nodes has the accessed
        attribute.

        function/method, super call and metaclasses are ignored
        """
        # generated_members may contain regular expressions
        # (surrounded by quote `"` and followed by a comma `,`)
        # REQUEST,aq_parent,"[a-zA-Z]+_set{1,2}"' =>
        # ('REQUEST', 'aq_parent', '[a-zA-Z]+_set{1,2}')
        if isinstance(self.config.generated_members, str):
            gen = shlex.shlex(self.config.generated_members)
            gen.whitespace += ','
            gen.wordchars += '[]-+'
            self.config.generated_members = tuple(tok.strip('"') for tok in gen)
        for pattern in self.config.generated_members:
            # attribute is marked as generated, stop here
            if re.match(pattern, node.attrname):
                return
        try:
            infered = list(node.expr.infer())
        except InferenceError:
            return
        # list of (node, nodename) which are missing the attribute
        missingattr = set()
        ignoremim = self.config.ignore_mixin_members
        inference_failure = False
        for owner in infered:
            # skip yes object
            if owner is YES:
                inference_failure = True
                continue
            # skip None anyway
            if isinstance(owner, astroid.Const) and owner.value is None:
                continue
            # XXX "super" / metaclass call
            if is_super(owner) or getattr(owner, 'type', None) == 'metaclass':
                continue
            name = getattr(owner, 'name', 'None')
            if name in self.config.ignored_classes:
                continue
            if ignoremim and name[-5:].lower() == 'mixin':
                continue
            try:
                if not [n for n in owner.getattr(node.attrname)
                        if not isinstance(n.statement(), astroid.AugAssign)]:
                    missingattr.add((owner, name))
                    continue
            except AttributeError:
                # XXX method / function
                continue
            except NotFoundError:
                if isinstance(owner, astroid.Function) and owner.decorators:
                    continue
                if isinstance(owner, Instance) and owner.has_dynamic_getattr():
                    continue
                # explicit skipping of module member access
                if owner.root().name in self.config.ignored_modules:
                    continue
                if isinstance(owner, astroid.Class):
                    # Look up in the metaclass only if the owner is itself
                    # a class.
                    # TODO: getattr doesn't return by default members
                    # from the metaclass, because handling various cases
                    # of methods accessible from the metaclass itself
                    # and/or subclasses only is too complicated for little to
                    # no benefit.
metaclass = owner.metaclass() try: if metaclass and metaclass.getattr(node.attrname): continue except NotFoundError: pass missingattr.add((owner, name)) continue # stop on the first found break else: # we have not found any node with the attributes, display the # message for infered nodes done = set() for owner, name in missingattr: if isinstance(owner, Instance): actual = owner._proxied else: actual = owner if actual in done: continue done.add(actual) confidence = INFERENCE if not inference_failure else INFERENCE_FAILURE self.add_message('no-member', node=node, args=(owner.display_type(), name, node.attrname), confidence=confidence) @check_messages('assignment-from-no-return', 'assignment-from-none') def visit_assign(self, node): """check that if assigning to a function call, the function is possibly returning something valuable """ if not isinstance(node.value, astroid.CallFunc): return function_node = safe_infer(node.value.func) # skip class, generator and incomplete function definition if not (isinstance(function_node, astroid.Function) and function_node.root().fully_defined()): return if function_node.is_generator() \ or function_node.is_abstract(pass_is_abstract=False): return returns = list(function_node.nodes_of_class(astroid.Return, skip_klass=astroid.Function)) if len(returns) == 0: self.add_message('assignment-from-no-return', node=node) else: for rnode in returns: if not (isinstance(rnode.value, astroid.Const) and rnode.value.value is None or rnode.value is None): break else: self.add_message('assignment-from-none', node=node) def _check_uninferable_callfunc(self, node): """ Check that the given uninferable CallFunc node does not call an actual function. """ if not isinstance(node.func, astroid.Getattr): return # Look for properties. First, obtain # the lhs of the Getattr node and search the attribute # there. If that attribute is a property or a subclass of properties, # then most likely it's not callable. # TODO: since astroid doesn't understand descriptors very well # we will not handle them here, right now. expr = node.func.expr klass = safe_infer(expr) if (klass is None or klass is astroid.YES or not isinstance(klass, astroid.Instance)): return try: attrs = klass._proxied.getattr(node.func.attrname) except astroid.NotFoundError: return for attr in attrs: if attr is astroid.YES: continue if not isinstance(attr, astroid.Function): continue # Decorated, see if it is decorated with a property if decorated_with_property(attr): self.add_message('not-callable', node=node, args=node.func.as_string()) break @check_messages(*(list(MSGS.keys()))) def visit_callfunc(self, node): """check that called functions/methods are inferred to callable objects, and that the arguments passed to the function match the parameters in the inferred function's definition """ # Build the set of keyword arguments, checking for duplicate keywords, # and count the positional arguments. keyword_args = set() num_positional_args = 0 for arg in node.args: if isinstance(arg, astroid.Keyword): keyword_args.add(arg.arg) else: num_positional_args += 1 called = safe_infer(node.func) # only function, generator and object defining __call__ are allowed if called is not None and not called.callable(): self.add_message('not-callable', node=node, args=node.func.as_string()) self._check_uninferable_callfunc(node) try: called, implicit_args, callable_name = _determine_callable(called) except ValueError: # Any error occurred during determining the function type, most of # those errors are handled by different warnings. 
return num_positional_args += implicit_args if called.args.args is None: # Built-in functions have no argument information. return if len(called.argnames()) != len(set(called.argnames())): # Duplicate parameter name (see E9801). We can't really make sense # of the function call in this case, so just return. return # Analyze the list of formal parameters. num_mandatory_parameters = len(called.args.args) - len(called.args.defaults) parameters = [] parameter_name_to_index = {} for i, arg in enumerate(called.args.args): if isinstance(arg, astroid.Tuple): name = None # Don't store any parameter names within the tuple, since those # are not assignable from keyword arguments. else: if isinstance(arg, astroid.Keyword): name = arg.arg else: assert isinstance(arg, astroid.AssName) # This occurs with: # def f( (a), (b) ): pass name = arg.name parameter_name_to_index[name] = i if i >= num_mandatory_parameters: defval = called.args.defaults[i - num_mandatory_parameters] else: defval = None parameters.append([(name, defval), False]) kwparams = {} for i, arg in enumerate(called.args.kwonlyargs): if isinstance(arg, astroid.Keyword): name = arg.arg else: assert isinstance(arg, astroid.AssName) name = arg.name kwparams[name] = [called.args.kw_defaults[i], False] # Match the supplied arguments against the function parameters. # 1. Match the positional arguments. for i in range(num_positional_args): if i < len(parameters): parameters[i][1] = True elif called.args.vararg is not None: # The remaining positional arguments get assigned to the *args # parameter. break else: # Too many positional arguments. self.add_message('too-many-function-args', node=node, args=(callable_name,)) break # 2. Match the keyword arguments. for keyword in keyword_args: if keyword in parameter_name_to_index: i = parameter_name_to_index[keyword] if parameters[i][1]: # Duplicate definition of function parameter. self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name)) else: parameters[i][1] = True elif keyword in kwparams: if kwparams[keyword][1]: # XXX is that even possible? # Duplicate definition of function parameter. self.add_message('redundant-keyword-arg', node=node, args=(keyword, callable_name)) else: kwparams[keyword][1] = True elif called.args.kwarg is not None: # The keyword argument gets assigned to the **kwargs parameter. pass else: # Unexpected keyword argument. self.add_message('unexpected-keyword-arg', node=node, args=(keyword, callable_name)) # 3. Match the *args, if any. Note that Python actually processes # *args _before_ any keyword arguments, but we wait until after # looking at the keyword arguments so as to make a more conservative # guess at how many values are in the *args sequence. if node.starargs is not None: for i in range(num_positional_args, len(parameters)): [(name, defval), assigned] = parameters[i] # Assume that *args provides just enough values for all # non-default parameters after the last parameter assigned by # the positional arguments but before the first parameter # assigned by the keyword arguments. This is the best we can # get without generating any false positives. if (defval is not None) or assigned: break parameters[i][1] = True # 4. Match the **kwargs, if any. if node.kwargs is not None: for i, [(name, defval), assigned] in enumerate(parameters): # Assume that *kwargs provides values for all remaining # unassigned named parameters. if name is not None: parameters[i][1] = True else: # **kwargs can't assign to tuples. 
pass # Check that any parameters without a default have been assigned # values. for [(name, defval), assigned] in parameters: if (defval is None) and not assigned: if name is None: display_name = '<tuple>' else: display_name = repr(name) self.add_message('no-value-for-parameter', node=node, args=(display_name, callable_name)) for name in kwparams: defval, assigned = kwparams[name] if defval is None and not assigned: self.add_message('missing-kwoa', node=node, args=(name, callable_name)) @check_messages('invalid-sequence-index') def visit_extslice(self, node): # Check extended slice objects as if they were used as a sequence # index to check if the object being sliced can support them return self.visit_index(node) @check_messages('invalid-sequence-index') def visit_index(self, node): if not node.parent or not hasattr(node.parent, "value"): return # Look for index operations where the parent is a sequence type. # If the types can be determined, only allow indices to be int, # slice or instances with __index__. parent_type = safe_infer(node.parent.value) if not isinstance(parent_type, (astroid.Class, astroid.Instance)): return # Determine what method on the parent this index will use # The parent of this node will be a Subscript, and the parent of that # node determines if the Subscript is a get, set, or delete operation. operation = node.parent.parent if isinstance(operation, astroid.Assign): methodname = '__setitem__' elif isinstance(operation, astroid.Delete): methodname = '__delitem__' else: methodname = '__getitem__' # Check if this instance's __getitem__, __setitem__, or __delitem__, as # appropriate to the statement, is implemented in a builtin sequence # type. This way we catch subclasses of sequence types but skip classes # that override __getitem__ and which may allow non-integer indices. try: methods = parent_type.getattr(methodname) if methods is astroid.YES: return itemmethod = methods[0] except (astroid.NotFoundError, IndexError): return if not isinstance(itemmethod, astroid.Function): return if itemmethod.root().name != BUILTINS: return if not itemmethod.parent: return if itemmethod.parent.name not in SEQUENCE_TYPES: return # For ExtSlice objects coming from visit_extslice, no further # inference is necessary, since if we got this far the ExtSlice # is an error. 
if isinstance(node, astroid.ExtSlice): index_type = node else: index_type = safe_infer(node) if index_type is None or index_type is astroid.YES: return # Constants must be of type int if isinstance(index_type, astroid.Const): if isinstance(index_type.value, int): return # Instance values must be int, slice, or have an __index__ method elif isinstance(index_type, astroid.Instance): if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.slice'): return try: index_type.getattr('__index__') return except astroid.NotFoundError: pass # Anything else is an error self.add_message('invalid-sequence-index', node=node) @check_messages('invalid-slice-index') def visit_slice(self, node): # Check the type of each part of the slice for index in (node.lower, node.upper, node.step): if index is None: continue index_type = safe_infer(index) if index_type is None or index_type is astroid.YES: continue # Constants must of type int or None if isinstance(index_type, astroid.Const): if isinstance(index_type.value, (int, type(None))): continue # Instance values must be of type int, None or an object # with __index__ elif isinstance(index_type, astroid.Instance): if index_type.pytype() in (BUILTINS + '.int', BUILTINS + '.NoneType'): continue try: index_type.getattr('__index__') return except astroid.NotFoundError: pass # Anything else is an error self.add_message('invalid-slice-index', node=node) def register(linter): """required method to auto register this checker """ linter.register_checker(TypeChecker(linter))
{ "content_hash": "7d0fb810be93483df97fa42378206cc1", "timestamp": "", "source": "github", "line_count": 607, "max_line_length": 86, "avg_line_length": 42.41350906095552, "alnum_prop": 0.5573120994367838, "repo_name": "Titulacion-Sistemas/PythonTitulacion-EV", "id": "10b9f8669fb9353ab9e4766e5174a6ef5a257078", "size": "26559", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "Lib/site-packages/pylint/checkers/typecheck.py", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "2117" }, { "name": "C", "bytes": "469338" }, { "name": "C++", "bytes": "93276" }, { "name": "CSS", "bytes": "173812" }, { "name": "JavaScript", "bytes": "203291" }, { "name": "PowerShell", "bytes": "8104" }, { "name": "Python", "bytes": "17198855" }, { "name": "Shell", "bytes": "2237" }, { "name": "TeX", "bytes": "1527" }, { "name": "Visual Basic", "bytes": "904" }, { "name": "XSLT", "bytes": "154751" } ], "symlink_target": "" }
import collections.abc import contextlib import hashlib import logging import math import operator import os import signal import warnings from collections import defaultdict from datetime import datetime, timedelta from functools import partial from types import TracebackType from typing import ( TYPE_CHECKING, Any, Callable, Collection, ContextManager, Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union, ) from urllib.parse import quote import attr import dill import jinja2 import pendulum from jinja2 import TemplateAssertionError, UndefinedError from sqlalchemy import ( Column, DateTime, Float, ForeignKeyConstraint, Index, Integer, String, and_, false, func, inspect, or_, text, ) from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import reconstructor, relationship from sqlalchemy.orm.attributes import NO_VALUE, set_committed_value from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.query import Query from sqlalchemy.orm.session import Session from sqlalchemy.sql.elements import BooleanClauseList from sqlalchemy.sql.expression import ColumnOperators from airflow import settings from airflow.compat.functools import cache from airflow.configuration import conf from airflow.exceptions import ( AirflowException, AirflowFailException, AirflowRescheduleException, AirflowSensorTimeout, AirflowSkipException, AirflowSmartSensorException, AirflowTaskTimeout, DagRunNotFound, TaskDeferralError, TaskDeferred, UnmappableXComLengthPushed, UnmappableXComTypePushed, XComForMappingNotPushed, ) from airflow.models.base import Base, StringID from airflow.models.log import Log from airflow.models.param import ParamsDict from airflow.models.taskfail import TaskFail from airflow.models.taskmap import TaskMap from airflow.models.taskreschedule import TaskReschedule from airflow.models.xcom import XCOM_RETURN_KEY, XCom from airflow.plugins_manager import integrate_macros_plugins from airflow.sentry import Sentry from airflow.stats import Stats from airflow.templates import SandboxedEnvironment from airflow.ti_deps.dep_context import DepContext from airflow.ti_deps.dependencies_deps import REQUEUEABLE_DEPS, RUNNING_DEPS from airflow.timetables.base import DataInterval from airflow.typing_compat import Literal from airflow.utils import timezone from airflow.utils.context import ConnectionAccessor, Context, VariableAccessor, context_merge from airflow.utils.email import send_email from airflow.utils.helpers import render_template_to_string from airflow.utils.log.logging_mixin import LoggingMixin from airflow.utils.net import get_hostname from airflow.utils.operator_helpers import context_to_airflow_vars from airflow.utils.platform import getuser from airflow.utils.retries import run_with_db_retries from airflow.utils.session import NEW_SESSION, create_session, provide_session from airflow.utils.sqlalchemy import ( ExecutorConfigType, ExtendedJSON, UtcDateTime, tuple_in_condition, with_row_locks, ) from airflow.utils.state import DagRunState, State, TaskInstanceState from airflow.utils.timeout import timeout TR = TaskReschedule _CURRENT_CONTEXT: List[Context] = [] log = logging.getLogger(__name__) if TYPE_CHECKING: from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG, DagModel from airflow.models.dagrun import DagRun from airflow.models.operator import Operator @contextlib.contextmanager def set_current_context(context: Context) -> Generator[Context, None, None]: """ Sets 
the current execution context to the provided context object. This method should be called once per Task execution, before calling operator.execute. """ _CURRENT_CONTEXT.append(context) try: yield context finally: expected_state = _CURRENT_CONTEXT.pop() if expected_state != context: log.warning( "Current context is not equal to the state at context stack. Expected=%s, got=%s", context, expected_state, ) def clear_task_instances( tis, session, activate_dag_runs=None, dag=None, dag_run_state: Union[DagRunState, Literal[False]] = DagRunState.QUEUED, ): """ Clears a set of task instances, but makes sure the running ones get killed. :param tis: a list of task instances :param session: current session :param dag_run_state: state to set DagRun to. If set to False, dagrun state will not be changed. :param dag: DAG object :param activate_dag_runs: Deprecated parameter, do not pass """ job_ids = [] # Keys: dag_id -> run_id -> map_indexes -> try_numbers -> task_id task_id_by_key: Dict[str, Dict[str, Dict[int, Dict[int, Set[str]]]]] = defaultdict( lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(set))) ) for ti in tis: if ti.state == TaskInstanceState.RUNNING: if ti.job_id: # If a task is cleared when running, set its state to RESTARTING so that # the task is terminated and becomes eligible for retry. ti.state = TaskInstanceState.RESTARTING job_ids.append(ti.job_id) else: task_id = ti.task_id if dag and dag.has_task(task_id): task = dag.get_task(task_id) ti.refresh_from_task(task) task_retries = task.retries ti.max_tries = ti.try_number + task_retries - 1 else: # Ignore errors when updating max_tries if dag is None or # task not found in dag since database records could be # outdated. We make max_tries the maximum value of its # original max_tries or the last attempted try number. ti.max_tries = max(ti.max_tries, ti.prev_attempted_tries) ti.state = None ti.external_executor_id = None ti.clear_next_method_args() session.merge(ti) task_id_by_key[ti.dag_id][ti.run_id][ti.map_index][ti.try_number].add(ti.task_id) if task_id_by_key: # Clear all reschedules related to the ti to clear # This is an optimization for the common case where all tis are for a small number # of dag_id, run_id, try_number, and map_index. Use a nested dict of dag_id, # run_id, try_number, map_index, and task_id to construct the where clause in a # hierarchical manner. This speeds up the delete statement by more than 40x for # large number of tis (50k+). conditions = or_( and_( TR.dag_id == dag_id, or_( and_( TR.run_id == run_id, or_( and_( TR.map_index == map_index, or_( and_(TR.try_number == try_number, TR.task_id.in_(task_ids)) for try_number, task_ids in task_tries.items() ), ) for map_index, task_tries in map_indexes.items() ), ) for run_id, map_indexes in run_ids.items() ), ) for dag_id, run_ids in task_id_by_key.items() ) delete_qry = TR.__table__.delete().where(conditions) session.execute(delete_qry) if job_ids: from airflow.jobs.base_job import BaseJob for job in session.query(BaseJob).filter(BaseJob.id.in_(job_ids)).all(): job.state = TaskInstanceState.RESTARTING if activate_dag_runs is not None: warnings.warn( "`activate_dag_runs` parameter to clear_task_instances function is deprecated. 
" "Please use `dag_run_state`", DeprecationWarning, stacklevel=2, ) if not activate_dag_runs: dag_run_state = False if dag_run_state is not False and tis: from airflow.models.dagrun import DagRun # Avoid circular import run_ids_by_dag_id = defaultdict(set) for instance in tis: run_ids_by_dag_id[instance.dag_id].add(instance.run_id) drs = ( session.query(DagRun) .filter( or_( and_(DagRun.dag_id == dag_id, DagRun.run_id.in_(run_ids)) for dag_id, run_ids in run_ids_by_dag_id.items() ) ) .all() ) dag_run_state = DagRunState(dag_run_state) # Validate the state value. for dr in drs: dr.state = dag_run_state dr.start_date = timezone.utcnow() if dag_run_state == DagRunState.QUEUED: dr.last_scheduling_decision = None dr.start_date = None class _LazyXComAccessIterator(collections.abc.Iterator): __slots__ = ['_cm', '_it'] def __init__(self, cm: ContextManager[Query]): self._cm = cm self._it = None def __del__(self): if self._it: self._cm.__exit__(None, None, None) def __iter__(self): return self def __next__(self): if not self._it: self._it = iter(self._cm.__enter__()) return XCom.deserialize_value(next(self._it)) @attr.define class _LazyXComAccess(collections.abc.Sequence): """Wrapper to lazily pull XCom with a sequence-like interface. Note that since the session bound to the parent query may have died when we actually access the sequence's content, we must create a new session for every function call with ``with_session()``. """ dag_id: str run_id: str task_id: str _query: Query = attr.ib(repr=False) _len: Optional[int] = attr.ib(init=False, repr=False, default=None) @classmethod def build_from_single_xcom(cls, first: "XCom", query: Query) -> "_LazyXComAccess": return cls( dag_id=first.dag_id, run_id=first.run_id, task_id=first.task_id, query=query.with_entities(XCom.value) .filter( XCom.run_id == first.run_id, XCom.task_id == first.task_id, XCom.dag_id == first.dag_id, XCom.map_index >= 0, ) .order_by(None) .order_by(XCom.map_index.asc()), ) def __len__(self): if self._len is None: with self._get_bound_query() as query: self._len = query.count() return self._len def __iter__(self): return _LazyXComAccessIterator(self._get_bound_query()) def __getitem__(self, key): if not isinstance(key, int): raise ValueError("only support index access for now") try: with self._get_bound_query() as query: r = query.offset(key).limit(1).one() except NoResultFound: raise IndexError(key) from None return XCom.deserialize_value(r) @contextlib.contextmanager def _get_bound_query(self) -> Generator[Query, None, None]: # Do we have a valid session already? 
        if self._query.session and self._query.session.is_active:
            yield self._query
            return
        session = settings.Session()
        try:
            yield self._query.with_session(session)
        finally:
            session.close()


class TaskInstanceKey(NamedTuple):
    """Key used to identify task instance."""

    dag_id: str
    task_id: str
    run_id: str
    try_number: int = 1
    map_index: int = -1

    @property
    def primary(self) -> Tuple[str, str, str, int]:
        """Return task instance primary key part of the key"""
        return self.dag_id, self.task_id, self.run_id, self.map_index

    @property
    def reduced(self) -> 'TaskInstanceKey':
        """Remake the key by subtracting 1 from try number to match in memory information"""
        return TaskInstanceKey(
            self.dag_id, self.task_id, self.run_id, max(1, self.try_number - 1), self.map_index
        )

    def with_try_number(self, try_number: int) -> 'TaskInstanceKey':
        """Returns TaskInstanceKey with provided ``try_number``"""
        return TaskInstanceKey(self.dag_id, self.task_id, self.run_id, try_number, self.map_index)

    @property
    def key(self) -> "TaskInstanceKey":
        """For API compatibility with TaskInstance.

        Returns self
        """
        return self


class TaskInstance(Base, LoggingMixin):
    """
    Task instances store the state of a task instance. This table is the
    authority and single source of truth around what tasks have run and the
    state they are in.

    The SqlAlchemy model doesn't have a SqlAlchemy foreign key to the task or
    dag model deliberately to have more control over transactions.

    Database transactions on this table should guard against double triggers and
    any confusion around what task instances are or aren't ready to run, even
    while multiple schedulers may be firing task instances.

    A value of -1 in map_index represents any of: a TI without mapped tasks;
    a TI with mapped tasks that has yet to be expanded (state=pending);
    a TI with mapped tasks that expanded to an empty list (state=skipped).
    """

    __tablename__ = "task_instance"

    task_id = Column(StringID(), primary_key=True, nullable=False)
    dag_id = Column(StringID(), primary_key=True, nullable=False)
    run_id = Column(StringID(), primary_key=True, nullable=False)
    map_index = Column(Integer, primary_key=True, nullable=False, server_default=text("-1"))

    start_date = Column(UtcDateTime)
    end_date = Column(UtcDateTime)
    duration = Column(Float)
    state = Column(String(20))
    _try_number = Column('try_number', Integer, default=0)
    max_tries = Column(Integer, server_default=text("-1"))
    hostname = Column(String(1000))
    unixname = Column(String(1000))
    job_id = Column(Integer)
    pool = Column(String(256), nullable=False)
    pool_slots = Column(Integer, default=1, nullable=False)
    queue = Column(String(256))
    priority_weight = Column(Integer)
    operator = Column(String(1000))
    queued_dttm = Column(UtcDateTime)
    queued_by_job_id = Column(Integer)
    pid = Column(Integer)
    executor_config = Column(ExecutorConfigType(pickler=dill))

    external_executor_id = Column(StringID())

    # The trigger to resume on if we are in state DEFERRED
    trigger_id = Column(Integer)

    # Optional timeout datetime for the trigger (past this, we'll fail)
    trigger_timeout = Column(DateTime)
    # The trigger_timeout should be TIMESTAMP (using UtcDateTime), but for ease of
    # migration, we are keeping it as DateTime pending a change where an expensive
    # migration is inevitable.

    # The method to call next, and any extra arguments to pass to it.
    # Usually used when resuming from DEFERRED.
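    # (Lifecycle sketch: ``_defer_task`` fills trigger_id/next_method/next_kwargs
    # when an operator defers; after the trigger fires, ``_execute_task`` resumes
    # by calling ``getattr(task, next_method)(**next_kwargs)``.)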
next_method = Column(String(1000)) next_kwargs = Column(MutableDict.as_mutable(ExtendedJSON)) # If adding new fields here then remember to add them to # refresh_from_db() or they won't display in the UI correctly __table_args__ = ( Index('ti_dag_state', dag_id, state), Index('ti_dag_run', dag_id, run_id), Index('ti_state', state), Index('ti_state_lkp', dag_id, task_id, run_id, state), Index('ti_pool', pool, state, priority_weight), Index('ti_job_id', job_id), Index('ti_trigger_id', trigger_id), ForeignKeyConstraint( [trigger_id], ['trigger.id'], name='task_instance_trigger_id_fkey', ondelete='CASCADE', ), ForeignKeyConstraint( [dag_id, run_id], ["dag_run.dag_id", "dag_run.run_id"], name='task_instance_dag_run_fkey', ondelete="CASCADE", ), ) dag_model = relationship( "DagModel", primaryjoin="TaskInstance.dag_id == DagModel.dag_id", foreign_keys=dag_id, uselist=False, innerjoin=True, viewonly=True, ) trigger = relationship( "Trigger", primaryjoin="TaskInstance.trigger_id == Trigger.id", foreign_keys=trigger_id, uselist=False, innerjoin=True, ) dag_run = relationship("DagRun", back_populates="task_instances", lazy='joined', innerjoin=True) rendered_task_instance_fields = relationship("RenderedTaskInstanceFields", lazy='noload', uselist=False) execution_date = association_proxy("dag_run", "execution_date") task: "Operator" # Not always set... def __init__( self, task: "Operator", execution_date: Optional[datetime] = None, run_id: Optional[str] = None, state: Optional[str] = None, map_index: int = -1, ): super().__init__() self.dag_id = task.dag_id self.task_id = task.task_id self.map_index = map_index self.refresh_from_task(task) # init_on_load will config the log self.init_on_load() if run_id is None and execution_date is not None: from airflow.models.dagrun import DagRun # Avoid circular import warnings.warn( "Passing an execution_date to `TaskInstance()` is deprecated in favour of passing a run_id", DeprecationWarning, # Stack level is 4 because SQLA adds some wrappers around the constructor stacklevel=4, ) # make sure we have a localized execution_date stored in UTC if execution_date and not timezone.is_localized(execution_date): self.log.warning( "execution date %s has no timezone information. Using default from dag or system", execution_date, ) if self.task.has_dag(): assert self.task.dag # For Mypy. execution_date = timezone.make_aware(execution_date, self.task.dag.timezone) else: execution_date = timezone.make_aware(execution_date) execution_date = timezone.convert_to_utc(execution_date) with create_session() as session: run_id = ( session.query(DagRun.run_id) .filter_by(dag_id=self.dag_id, execution_date=execution_date) .scalar() ) if run_id is None: raise DagRunNotFound( f"DagRun for {self.dag_id!r} with date {execution_date} not found" ) from None self.run_id = run_id self.try_number = 0 self.max_tries = self.task.retries self.unixname = getuser() if state: self.state = state self.hostname = '' # Is this TaskInstance being currently running within `airflow tasks run --raw`. 
        # Not persisted to the database so only valid for the current process
        self.raw = False
        # can be changed when calling 'run'
        self.test_mode = False

    @staticmethod
    def insert_mapping(run_id: str, task: "Operator", map_index: int) -> dict:
        """:meta private:"""
        return {
            'dag_id': task.dag_id,
            'task_id': task.task_id,
            'run_id': run_id,
            '_try_number': 0,
            'hostname': '',
            'unixname': getuser(),
            'queue': task.queue,
            'pool': task.pool,
            'pool_slots': task.pool_slots,
            'priority_weight': task.priority_weight_total,
            'run_as_user': task.run_as_user,
            'max_tries': task.retries,
            'executor_config': task.executor_config,
            'operator': task.task_type,
            'map_index': map_index,
        }

    @reconstructor
    def init_on_load(self):
        """Initialize the attributes that aren't stored in the DB"""
        # correctly configure the ti log
        self._log = logging.getLogger("airflow.task")
        self.test_mode = False  # can be changed when calling 'run'

    @property
    def try_number(self):
        """
        Return the try number that this task instance will have when it is
        actually run.

        If the TaskInstance is currently running, this will match the column in
        the database; in all other cases this will be incremented.
        """
        # This is designed so that task logs end up in the right file.
        # TODO: whether we need sensing here or not (in sensor and task_instance state machine)
        if self.state in State.running:
            return self._try_number
        return self._try_number + 1

    @try_number.setter
    def try_number(self, value):
        self._try_number = value

    @property
    def prev_attempted_tries(self):
        """
        Based on this instance's try_number, this will calculate
        the number of previously attempted tries, defaulting to 0.
        """
        # Expose this for the Task Tries and Gantt graph views.
        # Using `try_number` throws off the counts for non-running tasks.
        # Also useful in error logging contexts to get
        # the try number for the last try that was attempted.
        # https://issues.apache.org/jira/browse/AIRFLOW-2143
        return self._try_number

    @property
    def next_try_number(self):
        """Return the try number that will be used for the next attempt"""
        return self._try_number + 1

    def command_as_list(
        self,
        mark_success=False,
        ignore_all_deps=False,
        ignore_task_deps=False,
        ignore_depends_on_past=False,
        ignore_ti_state=False,
        local=False,
        pickle_id=None,
        raw=False,
        job_id=None,
        pool=None,
        cfg_path=None,
    ):
        """
        Returns a command that can be executed anywhere where airflow is
        installed. This command is part of the message sent to executors by
        the orchestrator.
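
        A sketch of a typical result (the flag set depends on the arguments;
        the dag/task/run ids shown are hypothetical)::

            ti.command_as_list(local=True, pool="default_pool")
            # ['airflow', 'tasks', 'run', 'example_dag', 'example_task',
            #  'manual__2022-01-01T00:00:00+00:00', '--local',
            #  '--pool', 'default_pool', '--subdir', 'DAGS_FOLDER/example.py']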
""" dag: Union["DAG", "DagModel"] # Use the dag if we have it, else fallback to the ORM dag_model, which might not be loaded if hasattr(self, 'task') and hasattr(self.task, 'dag'): dag = self.task.dag else: dag = self.dag_model should_pass_filepath = not pickle_id and dag path = None if should_pass_filepath: if dag.is_subdag: path = dag.parent_dag.relative_fileloc else: path = dag.relative_fileloc if path: if not path.is_absolute(): path = 'DAGS_FOLDER' / path path = str(path) return TaskInstance.generate_command( self.dag_id, self.task_id, run_id=self.run_id, mark_success=mark_success, ignore_all_deps=ignore_all_deps, ignore_task_deps=ignore_task_deps, ignore_depends_on_past=ignore_depends_on_past, ignore_ti_state=ignore_ti_state, local=local, pickle_id=pickle_id, file_path=path, raw=raw, job_id=job_id, pool=pool, cfg_path=cfg_path, map_index=self.map_index, ) @staticmethod def generate_command( dag_id: str, task_id: str, run_id: str, mark_success: bool = False, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, ignore_task_deps: bool = False, ignore_ti_state: bool = False, local: bool = False, pickle_id: Optional[int] = None, file_path: Optional[str] = None, raw: bool = False, job_id: Optional[str] = None, pool: Optional[str] = None, cfg_path: Optional[str] = None, map_index: int = -1, ) -> List[str]: """ Generates the shell command required to execute this task instance. :param dag_id: DAG ID :param task_id: Task ID :param run_id: The run_id of this task's DagRun :param mark_success: Whether to mark the task as successful :param ignore_all_deps: Ignore all ignorable dependencies. Overrides the other ignore_* parameters. :param ignore_depends_on_past: Ignore depends_on_past parameter of DAGs (e.g. for Backfills) :param ignore_task_deps: Ignore task-specific dependencies such as depends_on_past and trigger rule :param ignore_ti_state: Ignore the task instance's previous failure/success :param local: Whether to run the task locally :param pickle_id: If the DAG was serialized to the DB, the ID associated with the pickled DAG :param file_path: path to the file containing the DAG definition :param raw: raw mode (needs more details) :param job_id: job ID (needs more details) :param pool: the Airflow pool that the task should run in :param cfg_path: the Path to the configuration file :return: shell command that can be used to run the task instance :rtype: list[str] """ cmd = ["airflow", "tasks", "run", dag_id, task_id, run_id] if mark_success: cmd.extend(["--mark-success"]) if pickle_id: cmd.extend(["--pickle", str(pickle_id)]) if job_id: cmd.extend(["--job-id", str(job_id)]) if ignore_all_deps: cmd.extend(["--ignore-all-dependencies"]) if ignore_task_deps: cmd.extend(["--ignore-dependencies"]) if ignore_depends_on_past: cmd.extend(["--ignore-depends-on-past"]) if ignore_ti_state: cmd.extend(["--force"]) if local: cmd.extend(["--local"]) if pool: cmd.extend(["--pool", pool]) if raw: cmd.extend(["--raw"]) if file_path: cmd.extend(["--subdir", file_path]) if cfg_path: cmd.extend(["--cfg-path", cfg_path]) if map_index != -1: cmd.extend(['--map-index', str(map_index)]) return cmd @property def log_url(self): """Log URL for TaskInstance""" iso = quote(self.execution_date.isoformat()) base_url = conf.get('webserver', 'BASE_URL') return ( f"{base_url}/log" f"?execution_date={iso}" f"&task_id={self.task_id}" f"&dag_id={self.dag_id}" f"&map_index={self.map_index}" ) @property def mark_success_url(self): """URL to mark TI success""" base_url = conf.get('webserver', 'BASE_URL') return 
base_url + (
            "/confirm"
            f"?task_id={self.task_id}"
            f"&dag_id={self.dag_id}"
            f"&dag_run_id={quote(self.run_id)}"
            "&upstream=false"
            "&downstream=false"
            "&state=success"
        )

    @provide_session
    def current_state(self, session=NEW_SESSION) -> str:
        """
        Get the very latest state from the database. If a session is passed, we
        use it and looking up the state becomes part of that session; otherwise
        a new session is used.

        :param session: SQLAlchemy ORM Session
        """
        return (
            session.query(TaskInstance.state)
            .filter(
                TaskInstance.dag_id == self.dag_id,
                TaskInstance.task_id == self.task_id,
                TaskInstance.run_id == self.run_id,
            )
            .scalar()
        )

    @provide_session
    def error(self, session=NEW_SESSION):
        """
        Forces the task instance's state to FAILED in the database.

        :param session: SQLAlchemy ORM Session
        """
        self.log.error("Recording the task instance as FAILED")
        self.state = State.FAILED
        session.merge(self)
        session.commit()

    @provide_session
    def refresh_from_db(self, session=NEW_SESSION, lock_for_update=False) -> None:
        """
        Refreshes the task instance from the database based on the primary key.

        :param session: SQLAlchemy ORM Session
        :param lock_for_update: if True, indicates that the database should
            lock the TaskInstance (issuing a FOR UPDATE clause) until the
            session is committed.
        """
        self.log.debug("Refreshing TaskInstance %s from DB", self)

        qry = session.query(TaskInstance).filter(
            TaskInstance.dag_id == self.dag_id,
            TaskInstance.task_id == self.task_id,
            TaskInstance.run_id == self.run_id,
            TaskInstance.map_index == self.map_index,
        )

        if lock_for_update:
            for attempt in run_with_db_retries(logger=self.log):
                with attempt:
                    ti: Optional[TaskInstance] = qry.with_for_update().first()
        else:
            ti = qry.first()
        if ti:
            # Fields ordered per model definition
            self.start_date = ti.start_date
            self.end_date = ti.end_date
            self.duration = ti.duration
            self.state = ti.state
            # Get the raw value of try_number column, don't read through the
            # accessor here otherwise it will be incremented by one already.
            self.try_number = ti._try_number
            self.max_tries = ti.max_tries
            self.hostname = ti.hostname
            self.unixname = ti.unixname
            self.job_id = ti.job_id
            self.pool = ti.pool
            self.pool_slots = ti.pool_slots or 1
            self.queue = ti.queue
            self.priority_weight = ti.priority_weight
            self.operator = ti.operator
            self.queued_dttm = ti.queued_dttm
            self.queued_by_job_id = ti.queued_by_job_id
            self.pid = ti.pid
            self.executor_config = ti.executor_config
            self.external_executor_id = ti.external_executor_id
            self.trigger_id = ti.trigger_id
            self.next_method = ti.next_method
            self.next_kwargs = ti.next_kwargs
        else:
            self.state = None

    def refresh_from_task(self, task: "Operator", pool_override=None):
        """
        Copy common attributes from the given task.

        :param task: The task object to copy from
        :param pool_override: Use the pool_override instead of task's pool
        """
        self.task = task
        self.queue = task.queue
        self.pool = pool_override or task.pool
        self.pool_slots = task.pool_slots
        self.priority_weight = task.priority_weight_total
        self.run_as_user = task.run_as_user
        # Do not set max_tries to task.retries here because max_tries is a cumulative
        # value that needs to be stored in the db.
        self.executor_config = task.executor_config
        self.operator = task.task_type

    @provide_session
    def clear_xcom_data(self, session: Session = NEW_SESSION):
        """Clear all XCom data from the database for the task instance.

        If the task is unmapped, all XComs matching this task ID in the same DAG
        run are removed. If the task is mapped, only the one with matching map
        index is removed.
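
        A sketch of the call this delegates to (``map_index`` is passed as
        ``None`` for unmapped tasks, which ``XCom.clear`` treats as "all map
        indexes")::

            ti.clear_xcom_data()  # unmapped TI: removes the XComs of every map index
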
:param session: SQLAlchemy ORM Session """ self.log.debug("Clearing XCom data") if self.map_index < 0: map_index: Optional[int] = None else: map_index = self.map_index XCom.clear( dag_id=self.dag_id, task_id=self.task_id, run_id=self.run_id, map_index=map_index, session=session, ) @property def key(self) -> TaskInstanceKey: """Returns a tuple that identifies the task instance uniquely""" return TaskInstanceKey(self.dag_id, self.task_id, self.run_id, self.try_number, self.map_index) @provide_session def set_state(self, state: Optional[str], session=NEW_SESSION): """ Set TaskInstance state. :param state: State to set for the TI :param session: SQLAlchemy ORM Session """ current_time = timezone.utcnow() self.log.debug("Setting task state for %s to %s", self, state) self.state = state self.start_date = self.start_date or current_time if self.state in State.finished or self.state == State.UP_FOR_RETRY: self.end_date = self.end_date or current_time self.duration = (self.end_date - self.start_date).total_seconds() session.merge(self) @property def is_premature(self): """ Returns whether a task is in UP_FOR_RETRY state and its retry interval has elapsed. """ # is the task still in the retry waiting period? return self.state == State.UP_FOR_RETRY and not self.ready_for_retry() @provide_session def are_dependents_done(self, session=NEW_SESSION): """ Checks whether the immediate dependents of this task instance have succeeded or have been skipped. This is meant to be used by wait_for_downstream. This is useful when you do not want to start processing the next schedule of a task until the dependents are done. For instance, if the task DROPs and recreates a table. :param session: SQLAlchemy ORM Session """ task = self.task if not task.downstream_task_ids: return True ti = session.query(func.count(TaskInstance.task_id)).filter( TaskInstance.dag_id == self.dag_id, TaskInstance.task_id.in_(task.downstream_task_ids), TaskInstance.run_id == self.run_id, TaskInstance.state.in_([State.SKIPPED, State.SUCCESS]), ) count = ti[0][0] return count == len(task.downstream_task_ids) @provide_session def get_previous_dagrun( self, state: Optional[DagRunState] = None, session: Optional[Session] = None, ) -> Optional["DagRun"]: """The DagRun that ran before this task instance's DagRun. :param state: If passed, it only take into account instances of a specific state. :param session: SQLAlchemy ORM Session. """ dag = self.task.dag if dag is None: return None dr = self.get_dagrun(session=session) dr.dag = dag # We always ignore schedule in dagrun lookup when `state` is given # or the DAG is never scheduled. For legacy reasons, when # `catchup=True`, we use `get_previous_scheduled_dagrun` unless # `ignore_schedule` is `True`. ignore_schedule = state is not None or not dag.timetable.can_run if dag.catchup is True and not ignore_schedule: last_dagrun = dr.get_previous_scheduled_dagrun(session=session) else: last_dagrun = dr.get_previous_dagrun(session=session, state=state) if last_dagrun: return last_dagrun return None @provide_session def get_previous_ti( self, state: Optional[DagRunState] = None, session: Session = NEW_SESSION, ) -> Optional['TaskInstance']: """ The task instance for the task that ran before this task instance. :param state: If passed, it only take into account instances of a specific state. 
:param session: SQLAlchemy ORM Session """ dagrun = self.get_previous_dagrun(state, session=session) if dagrun is None: return None return dagrun.get_task_instance(self.task_id, session=session) @property def previous_ti(self): """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_ti` method. """ warnings.warn( """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_ti` method. """, DeprecationWarning, stacklevel=2, ) return self.get_previous_ti() @property def previous_ti_success(self) -> Optional['TaskInstance']: """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_ti` method. """ warnings.warn( """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_ti` method. """, DeprecationWarning, stacklevel=2, ) return self.get_previous_ti(state=DagRunState.SUCCESS) @provide_session def get_previous_execution_date( self, state: Optional[DagRunState] = None, session: Session = NEW_SESSION, ) -> Optional[pendulum.DateTime]: """ The execution date from property previous_ti_success. :param state: If passed, it only take into account instances of a specific state. :param session: SQLAlchemy ORM Session """ self.log.debug("previous_execution_date was called") prev_ti = self.get_previous_ti(state=state, session=session) return prev_ti and pendulum.instance(prev_ti.execution_date) @provide_session def get_previous_start_date( self, state: Optional[DagRunState] = None, session: Session = NEW_SESSION ) -> Optional[pendulum.DateTime]: """ The start date from property previous_ti_success. :param state: If passed, it only take into account instances of a specific state. :param session: SQLAlchemy ORM Session """ self.log.debug("previous_start_date was called") prev_ti = self.get_previous_ti(state=state, session=session) # prev_ti may not exist and prev_ti.start_date may be None. return prev_ti and prev_ti.start_date and pendulum.instance(prev_ti.start_date) @property def previous_start_date_success(self) -> Optional[pendulum.DateTime]: """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_start_date` method. """ warnings.warn( """ This attribute is deprecated. Please use `airflow.models.taskinstance.TaskInstance.get_previous_start_date` method. """, DeprecationWarning, stacklevel=2, ) return self.get_previous_start_date(state=DagRunState.SUCCESS) @provide_session def are_dependencies_met(self, dep_context=None, session=NEW_SESSION, verbose=False): """ Returns whether or not all the conditions are met for this task instance to be run given the context for the dependencies (e.g. a task instance being force run from the UI will ignore some dependencies). :param dep_context: The execution context that determines the dependencies that should be evaluated. 
:param session: database session :param verbose: whether log details on failed dependencies on info or debug log level """ dep_context = dep_context or DepContext() failed = False verbose_aware_logger = self.log.info if verbose else self.log.debug for dep_status in self.get_failed_dep_statuses(dep_context=dep_context, session=session): failed = True verbose_aware_logger( "Dependencies not met for %s, dependency '%s' FAILED: %s", self, dep_status.dep_name, dep_status.reason, ) if failed: return False verbose_aware_logger("Dependencies all met for %s", self) return True @provide_session def get_failed_dep_statuses(self, dep_context=None, session=NEW_SESSION): """Get failed Dependencies""" dep_context = dep_context or DepContext() for dep in dep_context.deps | self.task.deps: for dep_status in dep.get_dep_statuses(self, session, dep_context): self.log.debug( "%s dependency '%s' PASSED: %s, %s", self, dep_status.dep_name, dep_status.passed, dep_status.reason, ) if not dep_status.passed: yield dep_status def __repr__(self): prefix = f"<TaskInstance: {self.dag_id}.{self.task_id} {self.run_id} " if self.map_index != -1: prefix += f"map_index={self.map_index} " return prefix + f"[{self.state}]>" def next_retry_datetime(self): """ Get datetime of the next retry if the task instance fails. For exponential backoff, retry_delay is used as base and will be converted to seconds. """ delay = self.task.retry_delay if self.task.retry_exponential_backoff: # If the min_backoff calculation is below 1, it will be converted to 0 via int. Thus, # we must round up prior to converting to an int, otherwise a divide by zero error # will occur in the modded_hash calculation. min_backoff = int(math.ceil(delay.total_seconds() * (2 ** (self.try_number - 2)))) # In the case when delay.total_seconds() is 0, min_backoff will not be rounded up to 1. # To address this, we impose a lower bound of 1 on min_backoff. This effectively makes # the ceiling function unnecessary, but the ceiling function was retained to avoid # introducing a breaking change. if min_backoff < 1: min_backoff = 1 # deterministic per task instance ti_hash = int( hashlib.sha1( f"{self.dag_id}#{self.task_id}#{self.execution_date}#{self.try_number}".encode() ).hexdigest(), 16, ) # between 1 and 1.0 * delay * (2^retry_number) modded_hash = min_backoff + ti_hash % min_backoff # timedelta has a maximum representable value. The exponentiation # here means this value can be exceeded after a certain number # of tries (around 50 if the initial delay is 1s, even fewer if # the delay is larger). Cap the value here before creating a # timedelta object so the operation doesn't fail. delay_backoff_in_seconds = min(modded_hash, timedelta.max.total_seconds() - 1) delay = timedelta(seconds=delay_backoff_in_seconds) if self.task.max_retry_delay: delay = min(self.task.max_retry_delay, delay) return self.end_date + delay def ready_for_retry(self): """ Checks on whether the task instance is in the right state and timeframe to be retried. 
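
        An equivalent sketch of the check performed below::

            ti.state == State.UP_FOR_RETRY and ti.next_retry_datetime() < timezone.utcnow()

        For example, with ``retry_delay=timedelta(minutes=5)`` and exponential
        backoff disabled, a try whose ``end_date`` is 10:00 becomes ready for
        retry at 10:05.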
""" return self.state == State.UP_FOR_RETRY and self.next_retry_datetime() < timezone.utcnow() @provide_session def get_dagrun(self, session: Session = NEW_SESSION) -> "DagRun": """ Returns the DagRun for this TaskInstance :param session: SQLAlchemy ORM Session :return: DagRun """ info = inspect(self) if info.attrs.dag_run.loaded_value is not NO_VALUE: return self.dag_run from airflow.models.dagrun import DagRun # Avoid circular import dr = session.query(DagRun).filter(DagRun.dag_id == self.dag_id, DagRun.run_id == self.run_id).one() # Record it in the instance for next time. This means that `self.execution_date` will work correctly set_committed_value(self, 'dag_run', dr) return dr @provide_session def check_and_change_state_before_execution( self, verbose: bool = True, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, ignore_task_deps: bool = False, ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, job_id: Optional[str] = None, pool: Optional[str] = None, external_executor_id: Optional[str] = None, session=NEW_SESSION, ) -> bool: """ Checks dependencies and then sets state to RUNNING if they are met. Returns True if and only if state is set to RUNNING, which implies that task should be executed, in preparation for _run_raw_task :param verbose: whether to turn on more verbose logging :param ignore_all_deps: Ignore all of the non-critical dependencies, just runs :param ignore_depends_on_past: Ignore depends_on_past DAG attribute :param ignore_task_deps: Don't check the dependencies of this TaskInstance's task :param ignore_ti_state: Disregards previous task instance state :param mark_success: Don't run the task, mark its state as success :param test_mode: Doesn't record success or failure in the DB :param job_id: Job (BackfillJob / LocalTaskJob / SchedulerJob) ID :param pool: specifies the pool to use to run the task instance :param external_executor_id: The identifier of the celery executor :param session: SQLAlchemy ORM Session :return: whether the state was changed to running or not :rtype: bool """ task = self.task self.refresh_from_task(task, pool_override=pool) self.test_mode = test_mode self.refresh_from_db(session=session, lock_for_update=True) self.job_id = job_id self.hostname = get_hostname() self.pid = None if not ignore_all_deps and not ignore_ti_state and self.state == State.SUCCESS: Stats.incr('previously_succeeded', 1, 1) # TODO: Logging needs cleanup, not clear what is being printed hr_line_break = "\n" + ("-" * 80) # Line break if not mark_success: # Firstly find non-runnable and non-requeueable tis. # Since mark_success is not set, we do nothing. non_requeueable_dep_context = DepContext( deps=RUNNING_DEPS - REQUEUEABLE_DEPS, ignore_all_deps=ignore_all_deps, ignore_ti_state=ignore_ti_state, ignore_depends_on_past=ignore_depends_on_past, ignore_task_deps=ignore_task_deps, ) if not self.are_dependencies_met( dep_context=non_requeueable_dep_context, session=session, verbose=True ): session.commit() return False # For reporting purposes, we report based on 1-indexed, # not 0-indexed lists (i.e. Attempt 1 instead of # Attempt 0 for the first attempt). # Set the task start date. 
In case it was re-scheduled use the initial # start date that is recorded in task_reschedule table # If the task continues after being deferred (next_method is set), use the original start_date self.start_date = self.start_date if self.next_method else timezone.utcnow() if self.state == State.UP_FOR_RESCHEDULE: task_reschedule: TR = TR.query_for_task_instance(self, session=session).first() if task_reschedule: self.start_date = task_reschedule.start_date # Secondly we find non-runnable but requeueable tis. We reset its state. # This is because we might have hit concurrency limits, # e.g. because of backfilling. dep_context = DepContext( deps=REQUEUEABLE_DEPS, ignore_all_deps=ignore_all_deps, ignore_depends_on_past=ignore_depends_on_past, ignore_task_deps=ignore_task_deps, ignore_ti_state=ignore_ti_state, ) if not self.are_dependencies_met(dep_context=dep_context, session=session, verbose=True): self.state = State.NONE self.log.warning(hr_line_break) self.log.warning( "Rescheduling due to concurrency limits reached " "at task runtime. Attempt %s of " "%s. State set to NONE.", self.try_number, self.max_tries + 1, ) self.log.warning(hr_line_break) self.queued_dttm = timezone.utcnow() session.merge(self) session.commit() return False # print status message self.log.info(hr_line_break) self.log.info("Starting attempt %s of %s", self.try_number, self.max_tries + 1) self.log.info(hr_line_break) self._try_number += 1 if not test_mode: session.add(Log(State.RUNNING, self)) self.state = State.RUNNING self.external_executor_id = external_executor_id self.end_date = None if not test_mode: session.merge(self).task = task session.commit() # Closing all pooled connections to prevent # "max number of connections reached" settings.engine.dispose() # type: ignore if verbose: if mark_success: self.log.info("Marking success for %s on %s", self.task, self.execution_date) else: self.log.info("Executing %s on %s", self.task, self.execution_date) return True def _date_or_empty(self, attr: str): result: Optional[datetime] = getattr(self, attr, None) return result.strftime('%Y%m%dT%H%M%S') if result else '' def _log_state(self, lead_msg: str = ''): params = [ lead_msg, str(self.state).upper(), self.dag_id, self.task_id, ] message = '%sMarking task as %s. dag_id=%s, task_id=%s, ' if self.map_index >= 0: params.append(self.map_index) message += 'map_index=%d, ' self.log.info( message + 'execution_date=%s, start_date=%s, end_date=%s', *params, self._date_or_empty('execution_date'), self._date_or_empty('start_date'), self._date_or_empty('end_date'), ) # Ensure we unset next_method and next_kwargs to ensure that any # retries don't re-use them. def clear_next_method_args(self): self.log.debug("Clearing next_method and next_kwargs.") self.next_method = None self.next_kwargs = None @provide_session @Sentry.enrich_errors def _run_raw_task( self, mark_success: bool = False, test_mode: bool = False, job_id: Optional[str] = None, pool: Optional[str] = None, session=NEW_SESSION, ) -> None: """ Immediately runs the task (without checking or changing db state before execution) and then sets the appropriate final state after completion and runs any post-execute callbacks. Meant to be called only after another function changes the state to running. 
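
        Calling-order sketch -- essentially what ``run()`` does (arguments elided)::

            if ti.check_and_change_state_before_execution():
                ti._run_raw_task()
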
:param mark_success: Don't run the task, mark its state as success :param test_mode: Doesn't record success or failure in the DB :param pool: specifies the pool to use to run the task instance :param session: SQLAlchemy ORM Session """ self.test_mode = test_mode self.refresh_from_task(self.task, pool_override=pool) self.refresh_from_db(session=session) self.job_id = job_id self.hostname = get_hostname() self.pid = os.getpid() if not test_mode: session.merge(self) session.commit() actual_start_date = timezone.utcnow() Stats.incr(f'ti.start.{self.task.dag_id}.{self.task.task_id}') # Initialize final state counters at zero for state in State.task_states: Stats.incr(f'ti.finish.{self.task.dag_id}.{self.task.task_id}.{state}', count=0) self.task = self.task.prepare_for_execution() context = self.get_template_context(ignore_param_exceptions=False) try: if not mark_success: self._execute_task_with_callbacks(context, test_mode) if not test_mode: self.refresh_from_db(lock_for_update=True, session=session) self.state = State.SUCCESS except TaskDeferred as defer: # The task has signalled it wants to defer execution based on # a trigger. self._defer_task(defer=defer, session=session) self.log.info( 'Pausing task as DEFERRED. dag_id=%s, task_id=%s, execution_date=%s, start_date=%s', self.dag_id, self.task_id, self._date_or_empty('execution_date'), self._date_or_empty('start_date'), ) if not test_mode: session.add(Log(self.state, self)) session.merge(self) session.commit() return except AirflowSmartSensorException as e: self.log.info(e) return except AirflowSkipException as e: # Recording SKIP # log only if exception has any arguments to prevent log flooding if e.args: self.log.info(e) if not test_mode: self.refresh_from_db(lock_for_update=True, session=session) self.state = State.SKIPPED except AirflowRescheduleException as reschedule_exception: self._handle_reschedule(actual_start_date, reschedule_exception, test_mode, session=session) session.commit() return except (AirflowFailException, AirflowSensorTimeout) as e: # If AirflowFailException is raised, task should not retry. # If a sensor in reschedule mode reaches timeout, task should not retry. self.handle_failure(e, test_mode, context, force_fail=True, session=session) session.commit() raise except AirflowException as e: if not test_mode: self.refresh_from_db(lock_for_update=True, session=session) # for case when task is marked as success/failed externally # or dagrun timed out and task is marked as skipped # current behavior doesn't hit the callbacks if self.state in State.finished: self.clear_next_method_args() session.merge(self) session.commit() return else: self.handle_failure(e, test_mode, context, session=session) session.commit() raise except (Exception, KeyboardInterrupt) as e: self.handle_failure(e, test_mode, context, session=session) session.commit() raise finally: Stats.incr(f'ti.finish.{self.dag_id}.{self.task_id}.{self.state}') # Recording SKIPPED or SUCCESS self.clear_next_method_args() self.end_date = timezone.utcnow() self._log_state() self.set_duration() # run on_success_callback before db committing # otherwise, the LocalTaskJob sees the state is changed to `success`, # but the task_runner is still running, LocalTaskJob then treats the state is set externally! 
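            # (Ordering note: the success callback below therefore runs *before*
            # the session.merge()/commit() further down that publishes the final
            # state to the database.)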
self._run_finished_callback(self.task.on_success_callback, context, 'on_success') if not test_mode: session.add(Log(self.state, self)) session.merge(self) session.commit() def _execute_task_with_callbacks(self, context, test_mode=False): """Prepare Task for Execution""" from airflow.models.renderedtifields import RenderedTaskInstanceFields parent_pid = os.getpid() def signal_handler(signum, frame): pid = os.getpid() # If a task forks during execution (from DAG code) for whatever # reason, we want to make sure that we react to the signal only in # the process that we've spawned ourselves (referred to here as the # parent process). if pid != parent_pid: os._exit(1) return self.log.error("Received SIGTERM. Terminating subprocesses.") self.task.on_kill() raise AirflowException("Task received SIGTERM signal") signal.signal(signal.SIGTERM, signal_handler) # Don't clear Xcom until the task is certain to execute, and check if we are resuming from deferral. if not self.next_method: self.clear_xcom_data() with Stats.timer(f'dag.{self.task.dag_id}.{self.task.task_id}.duration'): # Set the validated/merged params on the task object. self.task.params = context['params'] task_orig = self.render_templates(context=context) if not test_mode: rtif = RenderedTaskInstanceFields(ti=self, render_templates=False) RenderedTaskInstanceFields.write(rtif) RenderedTaskInstanceFields.delete_old_records(self.task_id, self.dag_id) # Export context to make it available for operators to use. airflow_context_vars = context_to_airflow_vars(context, in_env_var_format=True) os.environ.update(airflow_context_vars) # Log context only for the default execution method, the assumption # being that otherwise we're resuming a deferred task (in which # case there's no need to log these again). if not self.next_method: self.log.info( "Exporting the following env vars:\n%s", '\n'.join(f"{k}={v}" for k, v in airflow_context_vars.items()), ) # Run pre_execute callback self.task.pre_execute(context=context) # Run on_execute callback self._run_execute_callback(context, self.task) if self.task.is_smart_sensor_compatible(): # Try to register it in the smart sensor service. registered = False try: registered = self.task.register_in_sensor_service(self, context) except Exception: self.log.warning( "Failed to register in sensor service." " Continue to run task in non smart sensor mode.", exc_info=True, ) if registered: # Will raise AirflowSmartSensorException to avoid long running execution. 
self._update_ti_state_for_sensing() # Execute the task with set_current_context(context): result = self._execute_task(context, task_orig) # Run post_execute callback self.task.post_execute(context=context, result=result) Stats.incr(f'operator_successes_{self.task.task_type}', 1, 1) Stats.incr('ti_successes') @provide_session def _update_ti_state_for_sensing(self, session=NEW_SESSION): self.log.info('Submitting %s to sensor service', self) self.state = State.SENSING self.start_date = timezone.utcnow() session.merge(self) session.commit() # Raise exception for sensing state raise AirflowSmartSensorException("Task successfully registered in smart sensor.") def _run_finished_callback(self, callback, context, callback_type): """Run callback after task finishes""" try: if callback: callback(context) except Exception: # pylint: disable=broad-except self.log.exception(f"Error when executing {callback_type} callback") def _execute_task(self, context, task_orig): """Executes Task (optionally with a Timeout) and pushes Xcom results""" task_to_execute = self.task # If the task has been deferred and is being executed due to a trigger, # then we need to pick the right method to come back to, otherwise # we go for the default execute if self.next_method: # __fail__ is a special signal value for next_method that indicates # this task was scheduled specifically to fail. if self.next_method == "__fail__": next_kwargs = self.next_kwargs or {} traceback = self.next_kwargs.get("traceback") if traceback is not None: self.log.error("Trigger failed:\n%s", "\n".join(traceback)) raise TaskDeferralError(next_kwargs.get("error", "Unknown")) # Grab the callable off the Operator/Task and add in any kwargs execute_callable = getattr(task_to_execute, self.next_method) if self.next_kwargs: execute_callable = partial(execute_callable, **self.next_kwargs) else: execute_callable = task_to_execute.execute # If a timeout is specified for the task, make it fail # if it goes beyond if task_to_execute.execution_timeout: # If we are coming in with a next_method (i.e. from a deferral), # calculate the timeout from our start_date. if self.next_method: timeout_seconds = ( task_to_execute.execution_timeout - (timezone.utcnow() - self.start_date) ).total_seconds() else: timeout_seconds = task_to_execute.execution_timeout.total_seconds() try: # It's possible we're already timed out, so fast-fail if true if timeout_seconds <= 0: raise AirflowTaskTimeout() # Run task in timeout wrapper with timeout(timeout_seconds): result = execute_callable(context=context) except AirflowTaskTimeout: task_to_execute.on_kill() raise else: result = execute_callable(context=context) with create_session() as session: if task_to_execute.do_xcom_push: xcom_value = result else: xcom_value = None if xcom_value is not None: # If the task returns a result, push an XCom containing it. self.xcom_push(key=XCOM_RETURN_KEY, value=xcom_value, session=session) self._record_task_map_for_downstreams(task_orig, xcom_value, session=session) return result @provide_session def _defer_task(self, session, defer: TaskDeferred): """ Marks the task as deferred and sets up the trigger that is needed to resume it. 
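
        Operators reach this path by raising the deferral signal from
        ``execute`` -- an illustrative sketch (the trigger instance and resume
        method name are placeholders)::

            raise TaskDeferred(trigger=some_trigger, method_name="execute_complete")

        The exception's ``method_name`` and ``kwargs`` are persisted below as
        ``next_method``/``next_kwargs`` and replayed once the trigger fires.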
""" from airflow.models.trigger import Trigger # First, make the trigger entry trigger_row = Trigger.from_object(defer.trigger) session.add(trigger_row) session.flush() # Then, update ourselves so it matches the deferral request # Keep an eye on the logic in `check_and_change_state_before_execution()` # depending on self.next_method semantics self.state = State.DEFERRED self.trigger_id = trigger_row.id self.next_method = defer.method_name self.next_kwargs = defer.kwargs or {} # Decrement try number so the next one is the same try self._try_number -= 1 # Calculate timeout too if it was passed if defer.timeout is not None: self.trigger_timeout = timezone.utcnow() + defer.timeout else: self.trigger_timeout = None # If an execution_timeout is set, set the timeout to the minimum of # it and the trigger timeout execution_timeout = self.task.execution_timeout if execution_timeout: if self.trigger_timeout: self.trigger_timeout = min(self.start_date + execution_timeout, self.trigger_timeout) else: self.trigger_timeout = self.start_date + execution_timeout def _run_execute_callback(self, context: Context, task): """Functions that need to be run before a Task is executed""" try: if task.on_execute_callback: task.on_execute_callback(context) except Exception: self.log.exception("Failed when executing execute callback") @provide_session def run( self, verbose: bool = True, ignore_all_deps: bool = False, ignore_depends_on_past: bool = False, ignore_task_deps: bool = False, ignore_ti_state: bool = False, mark_success: bool = False, test_mode: bool = False, job_id: Optional[str] = None, pool: Optional[str] = None, session=NEW_SESSION, ) -> None: """Run TaskInstance""" res = self.check_and_change_state_before_execution( verbose=verbose, ignore_all_deps=ignore_all_deps, ignore_depends_on_past=ignore_depends_on_past, ignore_task_deps=ignore_task_deps, ignore_ti_state=ignore_ti_state, mark_success=mark_success, test_mode=test_mode, job_id=job_id, pool=pool, session=session, ) if not res: return self._run_raw_task( mark_success=mark_success, test_mode=test_mode, job_id=job_id, pool=pool, session=session ) def dry_run(self): """Only Renders Templates for the TI""" from airflow.models.baseoperator import BaseOperator self.task = self.task.prepare_for_execution() self.render_templates() assert isinstance(self.task, BaseOperator) # For Mypy. self.task.dry_run() @provide_session def _handle_reschedule( self, actual_start_date, reschedule_exception, test_mode=False, session=NEW_SESSION ): # Don't record reschedule request in test mode if test_mode: return from airflow.models.dagrun import DagRun # Avoid circular import self.refresh_from_db(session) self.end_date = timezone.utcnow() self.set_duration() # Lock DAG run to be sure not to get into a deadlock situation when trying to insert # TaskReschedule which apparently also creates lock on corresponding DagRun entity with_row_locks( session.query(DagRun).filter_by( dag_id=self.dag_id, run_id=self.run_id, ), session=session, ).one() # Log reschedule request session.add( TaskReschedule( self.task, self.run_id, self._try_number, actual_start_date, self.end_date, reschedule_exception.reschedule_date, ) ) # set state self.state = State.UP_FOR_RESCHEDULE # Decrement try_number so subsequent runs will use the same try number and write # to same log file. 
self._try_number -= 1 self.clear_next_method_args() session.merge(self) session.commit() self.log.info('Rescheduling task, marking task as UP_FOR_RESCHEDULE') @staticmethod def get_truncated_error_traceback(error: BaseException, truncate_to: Callable) -> Optional[TracebackType]: """ Truncates the traceback of an exception to the first frame called from within a given function :param error: exception to get traceback from :param truncate_to: Function to truncate TB to. Must have a ``__code__`` attribute :meta private: """ tb = error.__traceback__ code = truncate_to.__func__.__code__ # type: ignore[attr-defined] while tb is not None: if tb.tb_frame.f_code is code: return tb.tb_next tb = tb.tb_next return tb or error.__traceback__ @provide_session def handle_failure(self, error, test_mode=None, context=None, force_fail=False, session=None) -> None: """Handle Failure for the TaskInstance""" if test_mode is None: test_mode = self.test_mode if context is None: context = self.get_template_context() if error: if isinstance(error, BaseException): tb = self.get_truncated_error_traceback(error, truncate_to=self._execute_task) self.log.error("Task failed with exception", exc_info=(type(error), error, tb)) else: self.log.error("%s", error) if not test_mode: self.refresh_from_db(session) self.end_date = timezone.utcnow() self.set_duration() Stats.incr(f'operator_failures_{self.task.task_type}') Stats.incr('ti_failures') if not test_mode: session.add(Log(State.FAILED, self)) # Log failure duration session.add(TaskFail(ti=self)) self.clear_next_method_args() if context is not None: context['exception'] = error # Set state correctly and figure out how to log it and decide whether # to email # Note, callback invocation needs to be handled by caller of # _run_raw_task to avoid race conditions which could lead to duplicate # invocations or miss invocation. # Since this function is called only when the TaskInstance state is running, # try_number contains the current try_number (not the next). We # only mark task instance as FAILED if the next task instance # try_number exceeds the max_tries ... or if force_fail is truthy task = None try: task = self.task.unmap() except Exception: self.log.error("Unable to unmap task, can't determine if we need to send an alert email or not") if force_fail or not self.is_eligible_to_retry(): self.state = State.FAILED email_for_state = operator.attrgetter('email_on_failure') callback = task.on_failure_callback if task else None callback_type = 'on_failure' else: if self.state == State.QUEUED: # We increase the try_number so as to fail the task if it fails to start after sometime self._try_number += 1 self.state = State.UP_FOR_RETRY email_for_state = operator.attrgetter('email_on_retry') callback = task.on_retry_callback if task else None callback_type = 'on_retry' self._log_state('Immediate failure requested. 
' if force_fail else '') if task and email_for_state(task) and task.email: try: self.email_alert(error, task) except Exception: self.log.exception('Failed to send email to: %s', task.email) if callback: self._run_finished_callback(callback, context, callback_type) if not test_mode: session.merge(self) session.flush() def is_eligible_to_retry(self): """Is task instance is eligible for retry""" if self.state == State.RESTARTING: # If a task is cleared when running, it goes into RESTARTING state and is always # eligible for retry return True return self.task.retries and self.try_number <= self.max_tries def get_template_context( self, session: Session = NEW_SESSION, ignore_param_exceptions: bool = True ) -> Context: """Return TI Context""" # Do not use provide_session here -- it expunges everything on exit! if not session: session = settings.Session() from airflow import macros integrate_macros_plugins() task = self.task assert task.dag # For Mypy. dag: DAG = task.dag dag_run = self.get_dagrun(session) data_interval = dag.get_run_data_interval(dag_run) # Validates Params and convert them into a simple dict. params = ParamsDict(suppress_exception=ignore_param_exceptions) with contextlib.suppress(AttributeError): params.update(dag.params) if task.params: params.update(task.params) if conf.getboolean('core', 'dag_run_conf_overrides_params'): self.overwrite_params_with_dag_run_conf(params=params, dag_run=dag_run) validated_params = params.validate() logical_date = timezone.coerce_datetime(self.execution_date) ds = logical_date.strftime('%Y-%m-%d') ds_nodash = ds.replace('-', '') ts = logical_date.isoformat() ts_nodash = logical_date.strftime('%Y%m%dT%H%M%S') ts_nodash_with_tz = ts.replace('-', '').replace(':', '') @cache # Prevent multiple database access. def _get_previous_dagrun_success() -> Optional["DagRun"]: return self.get_previous_dagrun(state=DagRunState.SUCCESS, session=session) def _get_previous_dagrun_data_interval_success() -> Optional["DataInterval"]: dagrun = _get_previous_dagrun_success() if dagrun is None: return None return dag.get_run_data_interval(dagrun) def get_prev_data_interval_start_success() -> Optional[pendulum.DateTime]: data_interval = _get_previous_dagrun_data_interval_success() if data_interval is None: return None return data_interval.start def get_prev_data_interval_end_success() -> Optional[pendulum.DateTime]: data_interval = _get_previous_dagrun_data_interval_success() if data_interval is None: return None return data_interval.end def get_prev_start_date_success() -> Optional[pendulum.DateTime]: dagrun = _get_previous_dagrun_success() if dagrun is None: return None return timezone.coerce_datetime(dagrun.start_date) @cache def get_yesterday_ds() -> str: return (logical_date - timedelta(1)).strftime('%Y-%m-%d') def get_yesterday_ds_nodash() -> str: return get_yesterday_ds().replace('-', '') @cache def get_tomorrow_ds() -> str: return (logical_date + timedelta(1)).strftime('%Y-%m-%d') def get_tomorrow_ds_nodash() -> str: return get_tomorrow_ds().replace('-', '') @cache def get_next_execution_date() -> Optional[pendulum.DateTime]: # For manually triggered dagruns that aren't run on a schedule, # the "next" execution date doesn't make sense, and should be set # to execution date for consistency with how execution_date is set # for manually triggered tasks, i.e. triggered_date == execution_date. 
if dag_run.external_trigger: return logical_date if dag is None: return None next_info = dag.next_dagrun_info(data_interval, restricted=False) if next_info is None: return None return timezone.coerce_datetime(next_info.logical_date) def get_next_ds() -> Optional[str]: execution_date = get_next_execution_date() if execution_date is None: return None return execution_date.strftime('%Y-%m-%d') def get_next_ds_nodash() -> Optional[str]: ds = get_next_ds() if ds is None: return ds return ds.replace('-', '') @cache def get_prev_execution_date(): # For manually triggered dagruns that aren't run on a schedule, # the "previous" execution date doesn't make sense, and should be set # to execution date for consistency with how execution_date is set # for manually triggered tasks, i.e. triggered_date == execution_date. if dag_run.external_trigger: return logical_date with warnings.catch_warnings(): warnings.simplefilter("ignore", DeprecationWarning) return dag.previous_schedule(logical_date) @cache def get_prev_ds() -> Optional[str]: execution_date = get_prev_execution_date() if execution_date is None: return None return execution_date.strftime(r'%Y-%m-%d') def get_prev_ds_nodash() -> Optional[str]: prev_ds = get_prev_ds() if prev_ds is None: return None return prev_ds.replace('-', '') # NOTE: If you add anything to this dict, make sure to also update the # definition in airflow/utils/context.pyi, and KNOWN_CONTEXT_KEYS in # airflow/utils/context.py! context = { 'conf': conf, 'dag': dag, 'dag_run': dag_run, 'data_interval_end': timezone.coerce_datetime(data_interval.end), 'data_interval_start': timezone.coerce_datetime(data_interval.start), 'ds': ds, 'ds_nodash': ds_nodash, 'execution_date': logical_date, 'inlets': task.inlets, 'logical_date': logical_date, 'macros': macros, 'next_ds': get_next_ds(), 'next_ds_nodash': get_next_ds_nodash(), 'next_execution_date': get_next_execution_date(), 'outlets': task.outlets, 'params': validated_params, 'prev_data_interval_start_success': get_prev_data_interval_start_success(), 'prev_data_interval_end_success': get_prev_data_interval_end_success(), 'prev_ds': get_prev_ds(), 'prev_ds_nodash': get_prev_ds_nodash(), 'prev_execution_date': get_prev_execution_date(), 'prev_execution_date_success': self.get_previous_execution_date( state=DagRunState.SUCCESS, session=session, ), 'prev_start_date_success': get_prev_start_date_success(), 'run_id': self.run_id, 'task': task, 'task_instance': self, 'task_instance_key_str': f"{task.dag_id}__{task.task_id}__{ds_nodash}", 'test_mode': self.test_mode, 'ti': self, 'tomorrow_ds': get_tomorrow_ds(), 'tomorrow_ds_nodash': get_tomorrow_ds_nodash(), 'ts': ts, 'ts_nodash': ts_nodash, 'ts_nodash_with_tz': ts_nodash_with_tz, 'var': { 'json': VariableAccessor(deserialize_json=True), 'value': VariableAccessor(deserialize_json=False), }, 'conn': ConnectionAccessor(), 'yesterday_ds': get_yesterday_ds(), 'yesterday_ds_nodash': get_yesterday_ds_nodash(), } # Mypy doesn't like turning existing dicts in to a TypeDict -- and we "lie" in the type stub to say it # is one, but in practice it isn't. See https://github.com/python/mypy/issues/8890 return Context(context) # type: ignore @provide_session def get_rendered_template_fields(self, session: Session = NEW_SESSION) -> None: """ Update task with rendered template fields for presentation in UI. If task has already run, will fetch from DB; otherwise will render. 
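
        A webserver-side sketch of reading the result afterwards::

            ti.get_rendered_template_fields()
            rendered = {f: getattr(ti.task, f) for f in ti.task.template_fields}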
""" from airflow.models.renderedtifields import RenderedTaskInstanceFields rendered_task_instance_fields = RenderedTaskInstanceFields.get_templated_fields(self, session=session) if rendered_task_instance_fields: self.task = self.task.unmap() for field_name, rendered_value in rendered_task_instance_fields.items(): setattr(self.task, field_name, rendered_value) return try: # If we get here, either the task hasn't run or the RTIF record was purged. from airflow.utils.log.secrets_masker import redact self.render_templates() for field_name in self.task.template_fields: rendered_value = getattr(self.task, field_name) setattr(self.task, field_name, redact(rendered_value, field_name)) except (TemplateAssertionError, UndefinedError) as e: raise AirflowException( "Webserver does not have access to User-defined Macros or Filters " "when Dag Serialization is enabled. Hence for the task that have not yet " "started running, please use 'airflow tasks render' for debugging the " "rendering of template_fields." ) from e @provide_session def get_rendered_k8s_spec(self, session=NEW_SESSION): """Fetch rendered template fields from DB""" from airflow.models.renderedtifields import RenderedTaskInstanceFields rendered_k8s_spec = RenderedTaskInstanceFields.get_k8s_pod_yaml(self, session=session) if not rendered_k8s_spec: try: rendered_k8s_spec = self.render_k8s_pod_yaml() except (TemplateAssertionError, UndefinedError) as e: raise AirflowException(f"Unable to render a k8s spec for this taskinstance: {e}") from e return rendered_k8s_spec def overwrite_params_with_dag_run_conf(self, params, dag_run): """Overwrite Task Params with DagRun.conf""" if dag_run and dag_run.conf: self.log.debug("Updating task params (%s) with DagRun.conf (%s)", params, dag_run.conf) params.update(dag_run.conf) def render_templates(self, context: Optional[Context] = None) -> "Operator": """Render templates in the operator fields. If the task was originally mapped, this may replace ``self.task`` with the unmapped, fully rendered BaseOperator. The original ``self.task`` before replacement is returned. """ if not context: context = self.get_template_context() rendered_task = self.task.render_template_fields(context) if rendered_task is None: # Compatibility -- custom renderer, assume unmapped. 
            return self.task
        original_task, self.task = self.task, rendered_task
        return original_task

    def render_k8s_pod_yaml(self) -> Optional[dict]:
        """Render k8s pod yaml"""
        from kubernetes.client.api_client import ApiClient

        from airflow.kubernetes.kube_config import KubeConfig
        from airflow.kubernetes.kubernetes_helper_functions import create_pod_id

        # Circular import
        from airflow.kubernetes.pod_generator import PodGenerator

        kube_config = KubeConfig()
        pod = PodGenerator.construct_pod(
            dag_id=self.dag_id,
            run_id=self.run_id,
            task_id=self.task_id,
            map_index=self.map_index,
            date=None,
            pod_id=create_pod_id(self.dag_id, self.task_id),
            try_number=self.try_number,
            kube_image=kube_config.kube_image,
            args=self.command_as_list(),
            pod_override_object=PodGenerator.from_obj(self.executor_config),
            scheduler_job_id="0",
            namespace=kube_config.executor_namespace,
            base_worker_pod=PodGenerator.deserialize_model_file(kube_config.pod_template_file),
        )
        settings.pod_mutation_hook(pod)
        sanitized_pod = ApiClient().sanitize_for_serialization(pod)
        return sanitized_pod

    def get_email_subject_content(
        self, exception: BaseException, task: Optional["BaseOperator"] = None
    ) -> Tuple[str, str, str]:
        """Get the email subject content for exceptions."""
        # For a ti from DB (without ti.task), return the default value
        # Reuse it for smart sensor to send default email alert
        if task is None:
            task = getattr(self, 'task', None)
        use_default = task is None
        exception_html = str(exception).replace('\n', '<br>')

        default_subject = 'Airflow alert: {{ti}}'
        # For reporting purposes, we report based on 1-indexed,
        # not 0-indexed lists (i.e. Try 1 instead of
        # Try 0 for the first attempt).
        default_html_content = (
            'Try {{try_number}} out of {{max_tries + 1}}<br>'
            'Exception:<br>{{exception_html}}<br>'
            'Log: <a href="{{ti.log_url}}">Link</a><br>'
            'Host: {{ti.hostname}}<br>'
            'Mark success: <a href="{{ti.mark_success_url}}">Link</a><br>'
        )

        default_html_content_err = (
            'Try {{try_number}} out of {{max_tries + 1}}<br>'
            'Exception:<br>Failed attempt to attach error logs<br>'
            'Log: <a href="{{ti.log_url}}">Link</a><br>'
            'Host: {{ti.hostname}}<br>'
            'Mark success: <a href="{{ti.mark_success_url}}">Link</a><br>'
        )

        # This function is called after changing the state from State.RUNNING,
        # so we need to subtract 1 from self.try_number here.
        current_try_number = self.try_number - 1
        additional_context: Dict[str, Any] = {
            "exception": exception,
            "exception_html": exception_html,
            "try_number": current_try_number,
            "max_tries": self.max_tries,
        }

        if use_default:
            default_context = {"ti": self, **additional_context}
            jinja_env = jinja2.Environment(
                loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), autoescape=True
            )
            subject = jinja_env.from_string(default_subject).render(**default_context)
            html_content = jinja_env.from_string(default_html_content).render(**default_context)
            html_content_err = jinja_env.from_string(default_html_content_err).render(**default_context)

        else:
            # Use the DAG's get_template_env() to set force_sandboxed. Don't add
            # the flag to the function on task object -- that function can be
            # overridden, and adding a flag breaks backward compatibility.
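            # The sandboxed environment keeps templated email content from
            # reaching into arbitrary attributes of objects in the context.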
            dag = self.task.get_dag()
            if dag:
                jinja_env = dag.get_template_env(force_sandboxed=True)
            else:
                jinja_env = SandboxedEnvironment(cache_size=0)
            jinja_context = self.get_template_context()
            context_merge(jinja_context, additional_context)

            def render(key: str, content: str) -> str:
                if conf.has_option('email', key):
                    path = conf.get_mandatory_value('email', key)
                    with open(path) as f:
                        content = f.read()
                return render_template_to_string(jinja_env.from_string(content), jinja_context)

            subject = render('subject_template', default_subject)
            html_content = render('html_content_template', default_html_content)
            html_content_err = render('html_content_template', default_html_content_err)

        return subject, html_content, html_content_err

    def email_alert(self, exception, task: "BaseOperator"):
        """Send alert email with exception information."""
        subject, html_content, html_content_err = self.get_email_subject_content(exception, task=task)
        assert task.email
        try:
            send_email(task.email, subject, html_content)
        except Exception:
            send_email(task.email, subject, html_content_err)

    def set_duration(self) -> None:
        """Set TI duration"""
        if self.end_date and self.start_date:
            self.duration = (self.end_date - self.start_date).total_seconds()
        else:
            self.duration = None
        self.log.debug("Task Duration set to %s", self.duration)

    def _record_task_map_for_downstreams(self, task: "Operator", value: Any, *, session: Session) -> None:
        # TODO: We don't push TaskMap for mapped task instances because it's not
        # currently possible for a downstream to depend on one individual mapped
        # task instance, only a task as a whole. This will change in AIP-42
        # Phase 2, and we'll need to further analyze the mapped task case.
        if next(task.iter_mapped_dependants(), None) is None:
            return
        if value is None:
            raise XComForMappingNotPushed()
        if task.is_mapped:
            return
        if not isinstance(value, collections.abc.Collection) or isinstance(value, (bytes, str)):
            raise UnmappableXComTypePushed(value)
        task_map = TaskMap.from_task_instance_xcom(self, value)
        max_map_length = conf.getint("core", "max_map_length", fallback=1024)
        if task_map.length > max_map_length:
            raise UnmappableXComLengthPushed(value, max_map_length)
        session.merge(task_map)

    @provide_session
    def xcom_push(
        self,
        key: str,
        value: Any,
        execution_date: Optional[datetime] = None,
        session: Session = NEW_SESSION,
    ) -> None:
        """
        Make an XCom available for tasks to pull.

        :param key: Key to store the value under.
        :param value: Value to store. What types are possible depends on whether
            ``enable_xcom_pickling`` is true or not. If so, this can be any
            picklable object; otherwise the value must be JSON-serializable.
        :param execution_date: Deprecated parameter that has no effect.
        """
        if execution_date is not None:
            self_execution_date = self.get_dagrun(session).execution_date
            if execution_date < self_execution_date:
                raise ValueError(
                    f'execution_date can not be in the past (current execution_date is '
                    f'{self_execution_date}; received {execution_date})'
                )
            message = "Passing 'execution_date' to 'TaskInstance.xcom_push()' is deprecated."
            warnings.warn(message, DeprecationWarning, stacklevel=3)

        XCom.set(
            key=key,
            value=value,
            task_id=self.task_id,
            dag_id=self.dag_id,
            run_id=self.run_id,
            map_index=self.map_index,
            session=session,
        )

    @provide_session
    def xcom_pull(
        self,
        task_ids: Optional[Union[str, Iterable[str]]] = None,
        dag_id: Optional[str] = None,
        key: str = XCOM_RETURN_KEY,
        include_prior_dates: bool = False,
        session: Session = NEW_SESSION,
        *,
        map_indexes: Optional[Union[int, Iterable[int]]] = None,
        default: Any = None,
    ) -> Any:
        """Pull XComs that optionally meet certain criteria.

        :param key: A key for the XCom. If provided, only XComs with matching
            keys will be returned. The default key is ``'return_value'``, also
            available as constant ``XCOM_RETURN_KEY``. This key is automatically
            given to XComs returned by tasks (as opposed to being pushed
            manually). To remove the filter, pass *None*.
        :param task_ids: Only XComs from tasks with matching ids will be
            pulled. Pass *None* to remove the filter.
        :param dag_id: If provided, only pulls XComs from this DAG. If *None*
            (default), the DAG of the calling task is used.
        :param map_indexes: If provided, only pull XComs with matching indexes.
            If *None* (default), this is inferred from the task(s) being pulled
            (see below for details).
        :param include_prior_dates: If *False* (default), only XComs from the
            current execution_date are returned. If *True*, XComs from previous
            dates are returned as well.

        When pulling one single task (``task_ids`` is *None* or a str) without
        specifying ``map_indexes``, the return value is inferred from whether
        the specified task is mapped. If not, the value from that single task
        instance is returned. If the task to pull is mapped, an iterator (not a
        list) yielding XComs from mapped task instances is returned. In either
        case, ``default`` (*None* if not specified) is returned if no matching
        XComs are found.

        When pulling multiple tasks (i.e. either ``task_ids`` or ``map_indexes``
        is a non-str iterable), a list of matching XComs is returned. Elements
        in the list are ordered by item ordering in ``task_ids`` and
        ``map_indexes``.
        """
        if dag_id is None:
            dag_id = self.dag_id

        query = XCom.get_many(
            key=key,
            run_id=self.run_id,
            dag_ids=dag_id,
            task_ids=task_ids,
            map_indexes=map_indexes,
            include_prior_dates=include_prior_dates,
            session=session,
        )

        # NOTE: Since we're only fetching the value field and not the whole
        # class, the @recreate annotation does not kick in. Therefore we need to
        # call XCom.deserialize_value() manually.

        # We are only pulling one single task.
        if (task_ids is None or isinstance(task_ids, str)) and not isinstance(map_indexes, Iterable):
            first = query.with_entities(
                XCom.run_id, XCom.task_id, XCom.dag_id, XCom.map_index, XCom.value
            ).first()
            if first is None:  # No matching XCom at all.
                return default
            if map_indexes is not None or first.map_index < 0:
                return XCom.deserialize_value(first)
            return _LazyXComAccess.build_from_single_xcom(first, query)

        # At this point either task_ids or map_indexes is explicitly multi-value.
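        # Sort the rows back into the caller's argument order: the positional
        # lookup tables below map each requested task_id / map_index to its
        # index in the corresponding argument (defaultdict(int) sends every
        # key to position 0 when no explicit filter was given).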
        results = (
            (r.task_id, r.map_index, XCom.deserialize_value(r))
            for r in query.with_entities(XCom.task_id, XCom.map_index, XCom.value)
        )

        if task_ids is None:
            task_id_pos: Dict[str, int] = defaultdict(int)
        elif isinstance(task_ids, str):
            task_id_pos = {task_ids: 0}
        else:
            task_id_pos = {task_id: i for i, task_id in enumerate(task_ids)}
        if map_indexes is None:
            map_index_pos: Dict[int, int] = defaultdict(int)
        elif isinstance(map_indexes, int):
            map_index_pos = {map_indexes: 0}
        else:
            map_index_pos = {map_index: i for i, map_index in enumerate(map_indexes)}

        def _arg_pos(item: Tuple[str, int, Any]) -> Tuple[int, int]:
            task_id, map_index, _ = item
            return task_id_pos[task_id], map_index_pos[map_index]

        results_sorted_by_arg_pos = sorted(results, key=_arg_pos)
        return [value for _, _, value in results_sorted_by_arg_pos]

    @provide_session
    def get_num_running_task_instances(self, session):
        """Return the number of running TIs from the DB"""
        # .count() is inefficient, so issue an explicit COUNT query instead
        return (
            session.query(func.count())
            .filter(
                TaskInstance.dag_id == self.dag_id,
                TaskInstance.task_id == self.task_id,
                TaskInstance.state == State.RUNNING,
            )
            .scalar()
        )

    def init_run_context(self, raw=False):
        """Sets the log context."""
        self.raw = raw
        self._set_context(self)

    @staticmethod
    def filter_for_tis(tis: Iterable[Union["TaskInstance", TaskInstanceKey]]) -> Optional[BooleanClauseList]:
        """Returns SQLAlchemy filter to query selected task instances"""
        # The DictKeys type (what we often pass here from the scheduler) is not directly indexable :(
        # Or it might be a generator, but we need to be able to iterate over it more than once
        tis = list(tis)

        if not tis:
            return None

        first = tis[0]

        dag_id = first.dag_id
        run_id = first.run_id
        map_index = first.map_index
        first_task_id = first.task_id

        # Common path optimisations: when all TIs are for the same dag_id and run_id, or same dag_id
        # and task_id -- this can be over 150x faster for huge numbers of TIs (20k+)
        if all(t.dag_id == dag_id and t.run_id == run_id and t.map_index == map_index for t in tis):
            return and_(
                TaskInstance.dag_id == dag_id,
                TaskInstance.run_id == run_id,
                TaskInstance.map_index == map_index,
                TaskInstance.task_id.in_(t.task_id for t in tis),
            )
        if all(t.dag_id == dag_id and t.task_id == first_task_id and t.map_index == map_index for t in tis):
            return and_(
                TaskInstance.dag_id == dag_id,
                TaskInstance.run_id.in_(t.run_id for t in tis),
                TaskInstance.map_index == map_index,
                TaskInstance.task_id == first_task_id,
            )
        if all(t.dag_id == dag_id and t.run_id == run_id and t.task_id == first_task_id for t in tis):
            return and_(
                TaskInstance.dag_id == dag_id,
                TaskInstance.run_id == run_id,
                TaskInstance.map_index.in_(t.map_index for t in tis),
                TaskInstance.task_id == first_task_id,
            )

        return tuple_in_condition(
            (TaskInstance.dag_id, TaskInstance.task_id, TaskInstance.run_id, TaskInstance.map_index),
            (ti.key.primary for ti in tis),
        )

    @classmethod
    def ti_selector_condition(cls, vals: Collection[Union[str, Tuple[str, int]]]) -> ColumnOperators:
        """
        Build an SQLAlchemy filter for a list of values where each element is
        either a task_id, or a (task_id, map_index) tuple.

        :meta private:
        """
        # Compute a filter for TI.task_id and TI.map_index based on input values
        # For each item, it will either be a task_id, or (task_id, map_index)
        task_id_only = [v for v in vals if isinstance(v, str)]
        with_map_index = [v for v in vals if not isinstance(v, str)]

        filters: List[ColumnOperators] = []
        if task_id_only:
            filters.append(cls.task_id.in_(task_id_only))
        if with_map_index:
            filters.append(tuple_in_condition((cls.task_id,
cls.map_index), with_map_index)) if not filters: return false() if len(filters) == 1: return filters[0] return or_(*filters) # State of the task instance. # Stores string version of the task state. TaskInstanceStateType = Tuple[TaskInstanceKey, str] class SimpleTaskInstance: """ Simplified Task Instance. Used to send data between processes via Queues. """ def __init__( self, dag_id: str, task_id: str, run_id: str, start_date: Optional[datetime], end_date: Optional[datetime], try_number: int, map_index: int, state: str, executor_config: Any, pool: str, queue: str, key: TaskInstanceKey, run_as_user: Optional[str] = None, priority_weight: Optional[int] = None, ): self.dag_id = dag_id self.task_id = task_id self.run_id = run_id self.map_index = map_index self.start_date = start_date self.end_date = end_date self.try_number = try_number self.state = state self.executor_config = executor_config self.run_as_user = run_as_user self.pool = pool self.priority_weight = priority_weight self.queue = queue self.key = key def __eq__(self, other): if isinstance(other, self.__class__): return self.__dict__ == other.__dict__ return NotImplemented @classmethod def from_ti(cls, ti: TaskInstance): return cls( dag_id=ti.dag_id, task_id=ti.task_id, run_id=ti.run_id, map_index=ti.map_index, start_date=ti.start_date, end_date=ti.end_date, try_number=ti.try_number, state=ti.state, executor_config=ti.executor_config, pool=ti.pool, queue=ti.queue, key=ti.key, run_as_user=ti.run_as_user if hasattr(ti, 'run_as_user') else None, priority_weight=ti.priority_weight if hasattr(ti, 'priority_weight') else None, ) @classmethod def from_dict(cls, obj_dict: dict): ti_key = TaskInstanceKey(*obj_dict.pop('key')) start_date = None end_date = None start_date_str: Optional[str] = obj_dict.pop('start_date') end_date_str: Optional[str] = obj_dict.pop('end_date') if start_date_str: start_date = timezone.parse(start_date_str) if end_date_str: end_date = timezone.parse(end_date_str) return cls(**obj_dict, start_date=start_date, end_date=end_date, key=ti_key) STATICA_HACK = True globals()['kcah_acitats'[::-1].upper()] = False if STATICA_HACK: # pragma: no cover from airflow.jobs.base_job import BaseJob TaskInstance.queued_by_job = relationship(BaseJob)
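# A minimal, hypothetical usage sketch of the XCom helpers defined above
# (illustrative comments only -- ``ti`` would be a live TaskInstance inside a
# running task, and the task ids are made up):
#
#     ti.xcom_push(key='row_count', value=42)            # JSON-serializable unless
#                                                        # enable_xcom_pickling is set
#     one = ti.xcom_pull(task_ids='extract')             # single value, or None
#     many = ti.xcom_pull(task_ids=['extract', 'load'])  # list ordered like task_ids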
{ "content_hash": "2d02d107e49c2366409b8c95cf516ef5", "timestamp": "", "source": "github", "line_count": 2600, "max_line_length": 110, "avg_line_length": 39.223461538461535, "alnum_prop": 0.5925417479726616, "repo_name": "Acehaidrey/incubator-airflow", "id": "89503852487d009918c65f7662980325c4dbea6b", "size": "102768", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "airflow/models/taskinstance.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "25785" }, { "name": "Dockerfile", "bytes": "76693" }, { "name": "HCL", "bytes": "3786" }, { "name": "HTML", "bytes": "164512" }, { "name": "JavaScript", "bytes": "236992" }, { "name": "Jinja", "bytes": "37155" }, { "name": "Jupyter Notebook", "bytes": "2929" }, { "name": "Mako", "bytes": "1339" }, { "name": "Python", "bytes": "21727510" }, { "name": "R", "bytes": "313" }, { "name": "Shell", "bytes": "495253" }, { "name": "TypeScript", "bytes": "326556" } ], "symlink_target": "" }
import os
from distutils.version import LooseVersion  # pylint: disable=E0611,E0401

from nose.plugins.attrib import attr

# project
from checks import AgentCheck
from tests.checks.common import AgentCheckTest


@attr(requires='zk')
class ZooKeeperTestCase(AgentCheckTest):
    CHECK_NAME = 'zk'

    CONFIG = {
        'host': "127.0.0.1",
        'port': 12181,
        'expected_mode': "standalone",
        'tags': ["mytag"]
    }

    WRONG_EXPECTED_MODE = {
        'host': "127.0.0.1",
        'port': 12181,
        'expected_mode': "follower",
        'tags': []
    }

    CONNECTION_FAILURE_CONFIG = {
        'host': "127.0.0.1",
        'port': 2182,
        'expected_mode': "down",
        'tags': []
    }

    STAT_METRICS = [
        'zookeeper.latency.min',
        'zookeeper.latency.avg',
        'zookeeper.latency.max',
        'zookeeper.bytes_received',
        'zookeeper.bytes_sent',
        'zookeeper.connections',
        'zookeeper.bytes_outstanding',
        'zookeeper.outstanding_requests',
        'zookeeper.zxid.epoch',
        'zookeeper.zxid.count',
        'zookeeper.nodes',
        'zookeeper.instances',
    ]

    MNTR_METRICS = [
        'zookeeper.packets_sent',
        'zookeeper.approximate_data_size',
        'zookeeper.num_alive_connections',
        'zookeeper.open_file_descriptor_count',
        'zookeeper.avg_latency',
        'zookeeper.znode_count',
        'zookeeper.outstanding_requests',
        'zookeeper.min_latency',
        'zookeeper.ephemerals_count',
        'zookeeper.watch_count',
        'zookeeper.max_file_descriptor_count',
        'zookeeper.packets_received',
        'zookeeper.max_latency',
    ]

    STATUS_TYPES = [
        'leader',
        'follower',
        'observer',
        'standalone',
        'down',
        'inactive',
        'unknown',
    ]

    def test_check(self):
        """
        Collect ZooKeeper metrics.
        """
        config = {
            'instances': [self.CONFIG]
        }
        self.run_check_twice(config)

        # Test metrics
        for mname in self.STAT_METRICS:
            self.assertMetric(mname, tags=["mode:standalone", "mytag"], count=1)

        zk_version = os.environ.get("FLAVOR_VERSION") or "3.4.10"
        if zk_version and LooseVersion(zk_version) > LooseVersion("3.4.0"):
            for mname in self.MNTR_METRICS:
                self.assertMetric(mname, tags=["mode:standalone", "mytag"], count=1)

        # Test service checks
        self.assertServiceCheck("zookeeper.ruok", status=AgentCheck.OK)
        self.assertServiceCheck("zookeeper.mode", status=AgentCheck.OK)

        expected_mode = self.CONFIG['expected_mode']
        mname = "zookeeper.instances." + expected_mode
        self.assertMetric(mname, value=1, count=1)
        self.coverage_report()

    def test_wrong_expected_mode(self):
        """
        Raise a 'critical' service check when ZooKeeper is not in the expected mode.
        """
        config = {
            'instances': [self.WRONG_EXPECTED_MODE]
        }
        self.run_check(config)

        # Test service checks
        self.assertServiceCheck("zookeeper.mode", status=AgentCheck.CRITICAL)

    def test_error_state(self):
        """
        Raise a 'critical' service check when ZooKeeper is in an error state.
        Report status as down.
        """
        config = {
            'instances': [self.CONNECTION_FAILURE_CONFIG]
        }
        self.assertRaises(
            Exception,
            lambda: self.run_check(config)
        )

        self.assertServiceCheck("zookeeper.ruok", status=AgentCheck.CRITICAL)

        self.assertMetric("zookeeper.instances", tags=["mode:down"], count=1)

        expected_mode = self.CONNECTION_FAILURE_CONFIG['expected_mode']
        mname = "zookeeper.instances." + expected_mode
        self.assertMetric(mname, value=1, count=1)
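# A hypothetical local invocation, assuming a ZooKeeper server is listening on
# 127.0.0.1:12181 as configured in CONFIG above (tests are selected via the
# nose attrib plugin, matching the @attr(requires='zk') marker):
#
#     nosetests -a requires=zk zk/test_zk.py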
{ "content_hash": "3fb9b396ba2f588f0b349dca099620d5", "timestamp": "", "source": "github", "line_count": 138, "max_line_length": 84, "avg_line_length": 28.36231884057971, "alnum_prop": 0.5822687787429739, "repo_name": "itsuugo/integrations-core", "id": "c3d05c823841d9fc05379c5fe63541cdf4b0dcac", "size": "4030", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "zk/test_zk.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "ApacheConf", "bytes": "2692" }, { "name": "Erlang", "bytes": "15429" }, { "name": "Go", "bytes": "1471" }, { "name": "Nginx", "bytes": "1173" }, { "name": "PLSQL", "bytes": "28501" }, { "name": "Perl", "bytes": "5845" }, { "name": "Python", "bytes": "1733132" }, { "name": "Ruby", "bytes": "177186" }, { "name": "Shell", "bytes": "11831" } ], "symlink_target": "" }
from __future__ import annotations import json import unittest from unittest import mock from unittest.mock import PropertyMock import httplib2 import pytest from googleapiclient.errors import HttpError from parameterized import parameterized from airflow.exceptions import AirflowException from airflow.models import Connection from airflow.providers.google.cloud.hooks.cloud_sql import CloudSQLDatabaseHook, CloudSQLHook from tests.providers.google.cloud.utils.base_gcp_mock import ( mock_base_gcp_hook_default_project_id, mock_base_gcp_hook_no_default_project_id, ) class TestGcpSqlHookDefaultProjectId(unittest.TestCase): def setUp(self): with mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__', new=mock_base_gcp_hook_default_project_id, ): self.cloudsql_hook = CloudSQLHook(api_version='v1', gcp_conn_id='test') @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) def test_instance_import_exception(self, mock_get_credentials): self.cloudsql_hook.get_conn = mock.Mock( side_effect=HttpError(resp=httplib2.Response({'status': 400}), content=b'Error content') ) with pytest.raises(AirflowException) as ctx: self.cloudsql_hook.import_instance(instance='instance', body={}) err = ctx.value assert "Importing instance " in str(err) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) def test_instance_export_exception(self, mock_get_credentials): self.cloudsql_hook.get_conn = mock.Mock( side_effect=HttpError(resp=httplib2.Response({'status': 400}), content=b'Error content') ) with pytest.raises(HttpError) as ctx: self.cloudsql_hook.export_instance(instance='instance', body={}) err = ctx.value assert 400 == err.resp.status assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_instance_import(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): import_method = get_conn.return_value.instances.return_value.import_ execute_method = import_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.import_instance(instance='instance', body={}) import_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( project_id='example-project', operation_name='operation_id' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_instance_export(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): export_method = get_conn.return_value.instances.return_value.export execute_method 
= export_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.export_instance(instance='instance', body={}) export_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( project_id='example-project', operation_name='operation_id' ) assert 1 == mock_get_credentials.call_count @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_instance_export_with_in_progress_retry(self, wait_for_operation_to_complete, get_conn): export_method = get_conn.return_value.instances.return_value.export execute_method = export_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.export_instance(project_id='example-project', instance='instance', body={}) assert 2 == export_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( project_id='example-project', operation_name='operation_id' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_get_instance(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): get_method = get_conn.return_value.instances.return_value.get execute_method = get_method.return_value.execute execute_method.return_value = {"name": "instance"} wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook.get_instance(instance='instance') assert res is not None assert 'instance' == res['name'] get_method.assert_called_once_with(instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_instance(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): insert_method = get_conn.return_value.instances.return_value.insert execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_instance(body={}) insert_method.assert_called_once_with(body={}, project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', 
return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_instance_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): insert_method = get_conn.return_value.instances.return_value.insert execute_method = insert_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_instance(body={}) assert 1 == mock_get_credentials.call_count assert 2 == insert_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_instance_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): patch_method = get_conn.return_value.instances.return_value.patch execute_method = patch_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_instance(instance='instance', body={}) assert 1 == mock_get_credentials.call_count assert 2 == patch_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_instance(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): patch_method = get_conn.return_value.instances.return_value.patch execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_instance(instance='instance', body={}) patch_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_instance(self, wait_for_operation_to_complete, get_conn, 
mock_get_credentials): delete_method = get_conn.return_value.instances.return_value.delete execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_instance(instance='instance') delete_method.assert_called_once_with(instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_instance_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): delete_method = get_conn.return_value.instances.return_value.delete execute_method = delete_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_instance(instance='instance') assert 1 == mock_get_credentials.call_count assert 2 == delete_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_get_database(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): get_method = get_conn.return_value.databases.return_value.get execute_method = get_method.return_value.execute execute_method.return_value = {"name": "database"} wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook.get_database(database='database', instance='instance') assert res is not None assert 'database' == res['name'] get_method.assert_called_once_with( instance='instance', database='database', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_database(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): insert_method = get_conn.return_value.databases.return_value.insert execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_database(instance='instance', body={}) insert_method.assert_called_once_with(body={}, 
instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_database_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): insert_method = get_conn.return_value.databases.return_value.insert execute_method = insert_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.create_database(instance='instance', body={}) assert 1 == mock_get_credentials.call_count assert 2 == insert_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_database(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): patch_method = get_conn.return_value.databases.return_value.patch execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_database(instance='instance', database='database', body={}) patch_method.assert_called_once_with( body={}, database='database', instance='instance', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_database_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): patch_method = get_conn.return_value.databases.return_value.patch execute_method = patch_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.patch_database(instance='instance', database='database', body={}) assert 1 == mock_get_credentials.call_count assert 2 == patch_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', 
project_id='example-project' ) @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_database(self, wait_for_operation_to_complete, get_conn, mock_get_credentials): delete_method = get_conn.return_value.databases.return_value.delete execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_database(instance='instance', database='database') delete_method.assert_called_once_with( database='database', instance='instance', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) assert 1 == mock_get_credentials.call_count @mock.patch( 'airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_credentials_and_project_id', return_value=(mock.MagicMock(), 'example-project'), ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_database_with_in_progress_retry( self, wait_for_operation_to_complete, get_conn, mock_get_credentials ): delete_method = get_conn.return_value.databases.return_value.delete execute_method = delete_method.return_value.execute execute_method.side_effect = [ HttpError( resp=type( '', (object,), { "status": 429, }, )(), content=b'Internal Server Error', ), {"name": "operation_id"}, ] wait_for_operation_to_complete.return_value = None self.cloudsql_hook.delete_database(instance='instance', database='database') assert 1 == mock_get_credentials.call_count assert 2 == delete_method.call_count assert 2 == execute_method.call_count wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) class TestGcpSqlHookNoDefaultProjectID(unittest.TestCase): def setUp(self): with mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.__init__', new=mock_base_gcp_hook_no_default_project_id, ): self.cloudsql_hook_no_default_project_id = CloudSQLHook(api_version='v1', gcp_conn_id='test') @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_instance_import_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): import_method = get_conn.return_value.instances.return_value.import_ execute_method = import_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.import_instance( project_id='example-project', instance='instance', body={} ) import_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( 
project_id='example-project', operation_name='operation_id' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_instance_export_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): export_method = get_conn.return_value.instances.return_value.export execute_method = export_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.export_instance( project_id='example-project', instance='instance', body={} ) export_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( project_id='example-project', operation_name='operation_id' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_get_instance_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): get_method = get_conn.return_value.instances.return_value.get execute_method = get_method.return_value.execute execute_method.return_value = {"name": "instance"} wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook_no_default_project_id.get_instance( project_id='example-project', instance='instance' ) assert res is not None assert 'instance' == res['name'] get_method.assert_called_once_with(instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_instance_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): insert_method = get_conn.return_value.instances.return_value.insert execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.create_instance(project_id='example-project', body={}) insert_method.assert_called_once_with(body={}, project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_instance_overridden_project_id( self, 
wait_for_operation_to_complete, get_conn, mock_project_id ): patch_method = get_conn.return_value.instances.return_value.patch execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.patch_instance( project_id='example-project', instance='instance', body={} ) patch_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_instance_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): delete_method = get_conn.return_value.instances.return_value.delete execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.delete_instance( project_id='example-project', instance='instance' ) delete_method.assert_called_once_with(instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_get_database_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): get_method = get_conn.return_value.databases.return_value.get execute_method = get_method.return_value.execute execute_method.return_value = {"name": "database"} wait_for_operation_to_complete.return_value = None res = self.cloudsql_hook_no_default_project_id.get_database( project_id='example-project', database='database', instance='instance' ) assert res is not None assert 'database' == res['name'] get_method.assert_called_once_with( instance='instance', database='database', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_not_called() @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_create_database_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): insert_method = get_conn.return_value.databases.return_value.insert execute_method = insert_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.create_database( project_id='example-project', instance='instance', body={} 
) insert_method.assert_called_once_with(body={}, instance='instance', project='example-project') execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_patch_database_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): patch_method = get_conn.return_value.databases.return_value.patch execute_method = patch_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.patch_database( project_id='example-project', instance='instance', database='database', body={} ) patch_method.assert_called_once_with( body={}, database='database', instance='instance', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) @mock.patch( 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook.project_id', new_callable=PropertyMock, return_value=None, ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook.get_conn') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLHook._wait_for_operation_to_complete') def test_delete_database_overridden_project_id( self, wait_for_operation_to_complete, get_conn, mock_project_id ): delete_method = get_conn.return_value.databases.return_value.delete execute_method = delete_method.return_value.execute execute_method.return_value = {"name": "operation_id"} wait_for_operation_to_complete.return_value = None self.cloudsql_hook_no_default_project_id.delete_database( project_id='example-project', instance='instance', database='database' ) delete_method.assert_called_once_with( database='database', instance='instance', project='example-project' ) execute_method.assert_called_once_with(num_retries=5) wait_for_operation_to_complete.assert_called_once_with( operation_name='operation_id', project_id='example-project' ) class TestCloudSqlDatabaseHook(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_validate_ssl_certs_no_ssl(self, get_connection): connection = Connection() connection.set_extra( json.dumps({"location": "test", "instance": "instance", "database_type": "postgres"}) ) get_connection.return_value = connection hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection' ) hook.validate_ssl_certs() @parameterized.expand( [ [{}], [{"sslcert": "cert_file.pem"}], [{"sslkey": "cert_key.pem"}], [{"sslrootcert": "root_cert_file.pem"}], [{"sslcert": "cert_file.pem", "sslkey": "cert_key.pem"}], [{"sslrootcert": "root_cert_file.pem", "sslkey": "cert_key.pem"}], [{"sslrootcert": "root_cert_file.pem", "sslcert": "cert_file.pem"}], ] ) @mock.patch('os.path.isfile') @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_validate_ssl_certs_missing_cert_params( self, cert_dict, 
        get_connection, mock_is_file
    ):
        mock_is_file.return_value = True
        connection = Connection()
        extras = {"location": "test", "instance": "instance", "database_type": "postgres", "use_ssl": "True"}
        extras.update(cert_dict)
        connection.set_extra(json.dumps(extras))

        get_connection.return_value = connection
        hook = CloudSQLDatabaseHook(
            gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection'
        )
        with pytest.raises(AirflowException) as ctx:
            hook.validate_ssl_certs()
        err = ctx.value
        assert "SSL connections requires" in str(err)

    @mock.patch('os.path.isfile')
    @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection')
    def test_cloudsql_database_hook_validate_ssl_certs_with_ssl(self, get_connection, mock_is_file):
        connection = Connection()
        mock_is_file.return_value = True
        connection.set_extra(
            json.dumps(
                {
                    "location": "test",
                    "instance": "instance",
                    "database_type": "postgres",
                    "use_ssl": "True",
                    "sslcert": "cert_file.pem",
                    "sslrootcert": "rootcert_file.pem",
                    "sslkey": "key_file.pem",
                }
            )
        )
        get_connection.return_value = connection
        hook = CloudSQLDatabaseHook(
            gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection'
        )
        hook.validate_ssl_certs()

    @mock.patch('os.path.isfile')
    @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection')
    def test_cloudsql_database_hook_validate_ssl_certs_with_ssl_files_not_readable(
        self, get_connection, mock_is_file
    ):
        connection = Connection()
        mock_is_file.return_value = False
        connection.set_extra(
            json.dumps(
                {
                    "location": "test",
                    "instance": "instance",
                    "database_type": "postgres",
                    "use_ssl": "True",
                    "sslcert": "cert_file.pem",
                    "sslrootcert": "rootcert_file.pem",
                    "sslkey": "key_file.pem",
                }
            )
        )
        get_connection.return_value = connection
        hook = CloudSQLDatabaseHook(
            gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection'
        )
        with pytest.raises(AirflowException) as ctx:
            hook.validate_ssl_certs()
        err = ctx.value
        assert "must be a readable file" in str(err)

    @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection')
    def test_cloudsql_database_hook_validate_socket_path_length_too_long(self, get_connection):
        connection = Connection()
        connection.set_extra(
            json.dumps(
                {
                    "location": "test",
                    "instance": "very_long_instance_name_that_will_be_too_long_to_build_socket_length",
                    "database_type": "postgres",
                    "use_proxy": "True",
                    "use_tcp": "False",
                }
            )
        )
        get_connection.return_value = connection
        hook = CloudSQLDatabaseHook(
            gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection'
        )
        with pytest.raises(AirflowException) as ctx:
            hook.validate_socket_path_length()
        err = ctx.value
        assert "The UNIX socket path length cannot exceed" in str(err)

    @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection')
    def test_cloudsql_database_hook_validate_socket_path_length_not_too_long(self, get_connection):
        connection = Connection()
        connection.set_extra(
            json.dumps(
                {
                    "location": "test",
                    "instance": "short_instance_name",
                    "database_type": "postgres",
                    "use_proxy": "True",
                    "use_tcp": "False",
                }
            )
        )
        get_connection.return_value = connection
        hook = CloudSQLDatabaseHook(
            gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection'
        )
        hook.validate_socket_path_length()

    @parameterized.expand(
        [
            ["http://:password@host:80/database"],
            ["http://user:@host:80/database"],
            ["http://user:password@/database"],
["http://user:password@host:80/"], ["http://user:password@/"], ["http://host:80/database"], ["http://host:80/"], ] ) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_create_connection_missing_fields(self, uri, get_connection): connection = Connection(uri=uri) params = { "location": "test", "instance": "instance", "database_type": "postgres", 'use_proxy': "True", 'use_tcp': "False", } connection.set_extra(json.dumps(params)) get_connection.return_value = connection hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection' ) with pytest.raises(AirflowException) as ctx: hook.create_connection() err = ctx.value assert "needs to be set in connection" in str(err) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_get_sqlproxy_runner_no_proxy(self, get_connection): connection = Connection(uri="http://user:password@host:80/database") connection.set_extra( json.dumps( { "location": "test", "instance": "instance", "database_type": "postgres", } ) ) get_connection.return_value = connection hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection' ) with pytest.raises(ValueError) as ctx: hook.get_sqlproxy_runner() err = ctx.value assert 'Proxy runner can only be retrieved in case of use_proxy = True' in str(err) @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_get_sqlproxy_runner(self, get_connection): connection = Connection(uri="http://user:password@host:80/database") connection.set_extra( json.dumps( { "location": "test", "instance": "instance", "database_type": "postgres", 'use_proxy': "True", 'use_tcp': "False", } ) ) get_connection.return_value = connection hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection' ) hook.create_connection() proxy_runner = hook.get_sqlproxy_runner() assert proxy_runner is not None @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def test_cloudsql_database_hook_get_database_hook(self, get_connection): connection = Connection(uri="http://user:password@host:80/database") connection.set_extra( json.dumps( { "location": "test", "instance": "instance", "database_type": "postgres", } ) ) get_connection.return_value = connection hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='cloudsql_connection', default_gcp_project_id='google_connection' ) connection = hook.create_connection() db_hook = hook.get_database_hook(connection=connection) assert db_hook is not None class TestCloudSqlDatabaseQueryHook(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection') def setUp(self, m): super().setUp() self.sql_connection = Connection( conn_id='my_gcp_sql_connection', conn_type='gcpcloudsql', login='login', password='password', host='host', schema='schema', extra='{"database_type":"postgres", "location":"my_location", ' '"instance":"my_instance", "use_proxy": true, ' '"project_id":"my_project"}', ) self.connection = Connection( conn_id='my_gcp_connection', conn_type='google_cloud_platform', ) scopes = [ "https://www.googleapis.com/auth/pubsub", "https://www.googleapis.com/auth/datastore", "https://www.googleapis.com/auth/bigquery", 
"https://www.googleapis.com/auth/devstorage.read_write", "https://www.googleapis.com/auth/logging.write", "https://www.googleapis.com/auth/cloud-platform", ] conn_extra = { "extra__google_cloud_platform__scope": ",".join(scopes), "extra__google_cloud_platform__project": "your-gcp-project", "extra__google_cloud_platform__key_path": '/var/local/google_cloud_default.json', } conn_extra_json = json.dumps(conn_extra) self.connection.set_extra(conn_extra_json) m.side_effect = [self.sql_connection, self.connection] self.db_hook = CloudSQLDatabaseHook( gcp_cloudsql_conn_id='my_gcp_sql_connection', gcp_conn_id='my_gcp_connection' ) def test_get_sqlproxy_runner(self): self.db_hook._generate_connection_uri() sqlproxy_runner = self.db_hook.get_sqlproxy_runner() assert sqlproxy_runner.gcp_conn_id == self.connection.conn_id project = self.sql_connection.extra_dejson['project_id'] location = self.sql_connection.extra_dejson['location'] instance = self.sql_connection.extra_dejson['instance'] instance_spec = f"{project}:{location}:{instance}" assert sqlproxy_runner.instance_specification == instance_spec @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_not_too_long_unix_socket_path(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&" "project_id=example-project&location=europe-west1&" "instance=" "test_db_with_longname_but_with_limit_of_UNIX_socket&" "use_proxy=True&sql_proxy_use_tcp=False" ) get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'postgres' == connection.conn_type assert 'testdb' == connection.schema def _verify_postgres_connection(self, get_connection, uri): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'postgres' == connection.conn_type assert '127.0.0.1' == connection.host assert 3200 == connection.port assert 'testdb' == connection.schema return connection @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_postgres(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=False&use_ssl=False" ) self._verify_postgres_connection(get_connection, uri) @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_postgres_ssl(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=False&use_ssl=True&sslcert=/bin/bash&" "sslkey=/bin/bash&sslrootcert=/bin/bash" ) connection = self._verify_postgres_connection(get_connection, uri) assert '/bin/bash' == connection.extra_dejson['sslkey'] assert '/bin/bash' == connection.extra_dejson['sslcert'] assert '/bin/bash' == connection.extra_dejson['sslrootcert'] @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_postgres_proxy_socket(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=True&sql_proxy_use_tcp=False" ) get_connection.side_effect = 
[Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'postgres' == connection.conn_type assert '/tmp' in connection.host assert 'example-project:europe-west1:testdb' in connection.host assert connection.port is None assert 'testdb' == connection.schema @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_project_id_missing(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=mysql&" "location=europe-west1&instance=testdb&" "use_proxy=False&use_ssl=False" ) self.verify_mysql_connection(get_connection, uri) def verify_mysql_connection(self, get_connection, uri): get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'mysql' == connection.conn_type assert '127.0.0.1' == connection.host assert 3200 == connection.port assert 'testdb' == connection.schema return connection @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_postgres_proxy_tcp(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=True&sql_proxy_use_tcp=True" ) get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'postgres' == connection.conn_type assert '127.0.0.1' == connection.host assert 3200 != connection.port assert 'testdb' == connection.schema @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=mysql&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=False&use_ssl=False" ) self.verify_mysql_connection(get_connection, uri) @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql_ssl(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=mysql&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=False&use_ssl=True&sslcert=/bin/bash&" "sslkey=/bin/bash&sslrootcert=/bin/bash" ) connection = self.verify_mysql_connection(get_connection, uri) assert '/bin/bash' == json.loads(connection.extra_dejson['ssl'])['cert'] assert '/bin/bash' == json.loads(connection.extra_dejson['ssl'])['key'] assert '/bin/bash' == json.loads(connection.extra_dejson['ssl'])['ca'] @mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql_proxy_socket(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=mysql&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=True&sql_proxy_use_tcp=False" ) get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'mysql' == connection.conn_type assert 'localhost' == connection.host assert '/tmp' in connection.extra_dejson['unix_socket'] assert 'example-project:europe-west1:testdb' in connection.extra_dejson['unix_socket'] assert connection.port is None assert 'testdb' == connection.schema 
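    # Added commentary (not from the original suite): with use_proxy=True and
    # sql_proxy_use_tcp=False the hook targets a UNIX socket whose path embeds
    # "<project>:<location>:<instance>", which is what the proxy-socket tests
    # above assert for both the postgres and mysql variants.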
@mock.patch("airflow.providers.google.cloud.hooks.cloud_sql.CloudSQLDatabaseHook.get_connection") def test_hook_with_correct_parameters_mysql_tcp(self, get_connection): uri = ( "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=mysql&" "project_id=example-project&location=europe-west1&instance=testdb&" "use_proxy=True&sql_proxy_use_tcp=True" ) get_connection.side_effect = [Connection(uri=uri)] hook = CloudSQLDatabaseHook() connection = hook.create_connection() assert 'mysql' == connection.conn_type assert '127.0.0.1' == connection.host assert 3200 != connection.port assert 'testdb' == connection.schema
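# --- Added illustrative sketch (not part of the Airflow test suite above). ---
# It decomposes one of the gcpcloudsql URIs exercised by these tests into the
# query parameters the hook validates. Only the standard library is used, so
# the snippet runs without Airflow installed; the URI values are the same
# illustrative ones used in the tests.
from urllib.parse import parse_qs, urlsplit

_demo_uri = (
    "gcpcloudsql://user:password@127.0.0.1:3200/testdb?database_type=postgres&"
    "project_id=example-project&location=europe-west1&instance=testdb&"
    "use_proxy=False&use_ssl=False"
)
_parts = urlsplit(_demo_uri)
_params = {key: values[0] for key, values in parse_qs(_parts.query).items()}
assert (_parts.hostname, _parts.port, _parts.path.lstrip("/")) == ("127.0.0.1", 3200, "testdb")
assert _params["database_type"] == "postgres" and _params["use_proxy"] == "False"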
{ "content_hash": "a6a64a2d44a666bccc0b5ea974921584", "timestamp": "", "source": "github", "line_count": 1172, "max_line_length": 110, "avg_line_length": 48.02047781569966, "alnum_prop": 0.6441719971570717, "repo_name": "cfei18/incubator-airflow", "id": "9e7bdaa5c79c8de25309053d95082857d6c02154", "size": "57067", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tests/providers/google/cloud/hooks/test_cloud_sql.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "25980" }, { "name": "Dockerfile", "bytes": "72003" }, { "name": "HCL", "bytes": "3786" }, { "name": "HTML", "bytes": "173434" }, { "name": "JavaScript", "bytes": "143068" }, { "name": "Jinja", "bytes": "38808" }, { "name": "Jupyter Notebook", "bytes": "5482" }, { "name": "Mako", "bytes": "1339" }, { "name": "Python", "bytes": "22660683" }, { "name": "R", "bytes": "313" }, { "name": "Shell", "bytes": "312715" }, { "name": "TypeScript", "bytes": "472379" } ], "symlink_target": "" }
""" Class for representing individuals within the `lineage` framework. """ """ MIT License Copyright (c) 2016 Andrew Riha Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ import inspect from snps import SNPs from snps.utils import clean_str class Individual(SNPs): """Object used to represent and interact with an individual. The ``Individual`` object maintains information about an individual. The object provides methods for loading an individual's genetic data (SNPs) and normalizing it for use with the `lineage` framework. ``Individual`` inherits from ``snps.SNPs``. See here for details about the ``SNPs`` object: https://snps.readthedocs.io/en/latest/snps.html """ def __init__(self, name, raw_data=(), **kwargs): """Initialize an ``Individual`` object. Parameters ---------- name : str name of the individual raw_data : str, bytes, ``SNPs`` (or list or tuple thereof) path(s) to file(s), bytes, or ``SNPs`` object(s) with raw genotype data **kwargs parameters to ``snps.SNPs`` and/or ``snps.SNPs.merge`` """ self._name = name init_args = self._get_defined_kwargs(SNPs, kwargs) merge_args = self._get_defined_kwargs(SNPs.merge, kwargs) super().__init__(**init_args) # load raw data by merging `SNPs` objects into this object if not isinstance(raw_data, list) and not isinstance(raw_data, tuple): s = ( SNPs(raw_data, **init_args) if not isinstance(raw_data, SNPs) else raw_data ) self.merge([s], **merge_args) else: for file in raw_data: s = file if not isinstance(file, SNPs): s = SNPs(file, **init_args) self.merge([s], **merge_args) def _get_defined_kwargs(self, callable, kwargs): sig = inspect.signature(callable) return {k: kwargs[k] for k in kwargs if k in sig.parameters} def __repr__(self): return "Individual({!r})".format(self._name) @property def name(self): """Get this ``Individual``'s name. Returns ------- str """ return self._name def get_var_name(self): return clean_str(self.name)
{ "content_hash": "b0864580c67bd01f28763e430d37236c", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 95, "avg_line_length": 34.52577319587629, "alnum_prop": 0.6434756643774261, "repo_name": "apriha/lineage", "id": "102ffcf42a82b372ddee705d1f33ddc029061105", "size": "3349", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/lineage/individual.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "202726" } ], "symlink_target": "" }
from unittest import TestCase from logentries_api.alerts import ( PagerDutyAlertConfig, WebHookAlertConfig, EmailAlertConfig, SlackAlertConfig, HipChatAlertConfig ) class AlertConfigsTests(TestCase): """ Test each alert type """ def test_pagerduty(self): """ Test PagerDuty schema """ service_key = 'bb7aad43abd9401a9e4f065c9e5ab89f' alert = PagerDutyAlertConfig(description='testing', service_key=service_key) self.assertDictEqual( alert.args(), { 'args': { 'description': 'testing', 'service_key': 'bb7aad43abd9401a9e4f065c9e5ab89f' }, 'type': 'pagerduty' } ) def test_email(self): """ Test Email alert """ alert = EmailAlertConfig(address='me@example.com') self.assertDictEqual( alert.args(), { 'args': { 'direct': 'me@example.com', 'teams': '', 'users': '' }, 'type': 'mailto' } ) def test_webhook(self): """ Test WebHook schema """ alert = WebHookAlertConfig(url='https://www.google.com') self.assertDictEqual( alert.args(), { 'args': { 'url': 'https://www.google.com' }, 'type': 'webhook' } ) def test_slack(self): """ Test Slack schema """ alert = SlackAlertConfig(url='https://www.google.com') self.assertDictEqual( alert.args(), { 'args': { 'url': 'https://www.google.com' }, 'type': 'slack' } ) def test_hipchat(self): """ Test HipChat schema """ token = 'bb7aad43abd9401a9e4f065c9e5ab89f' alert = HipChatAlertConfig(token=token, room_name='group') self.assertDictEqual( alert.args(), { 'args': { 'notification_key': token, 'room_name': 'group' }, 'type': 'hipchat' } )
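# --- Added usage sketch (separate from the test class above). ---
# It mirrors the Slack test case: args() yields the payload that would be
# registered with Logentries. The webhook URL is a hypothetical placeholder.
if __name__ == '__main__':
    demo_alert = SlackAlertConfig(url='https://hooks.slack.com/services/T000/B000/XXX')
    assert demo_alert.args() == {
        'args': {'url': 'https://hooks.slack.com/services/T000/B000/XXX'},
        'type': 'slack',
    }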
{ "content_hash": "e3e4a550e8072db4aea5fb815f882e52", "timestamp": "", "source": "github", "line_count": 97, "max_line_length": 84, "avg_line_length": 24.649484536082475, "alnum_prop": 0.437892095357591, "repo_name": "ambitioninc/python-logentries-api", "id": "6c4b319c586af232db7fb26bbe28428086bd90d9", "size": "2391", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "logentries_api/tests/test_alerts.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "105035" } ], "symlink_target": "" }
DATABASE = './database.db' JOB_FILE = './job_configuration' try: # This is the path to the upload directory app.config['UPLOAD_FOLDER'] = './uploads/' app.config['MAX_CONTENT_LENGTH'] = 60 * 1024 * 1024 except: pass
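# --- Added loading sketch (not part of this config module). ---
# `app` is undefined when this file is executed on its own, so the bare
# except above silently skips the upload settings, while the uppercase
# constants are still picked up by Flask's from_pyfile(). The demo app name
# below is an assumption; run this file directly to try it.
if __name__ == "__main__":
    from flask import Flask

    demo_app = Flask(__name__)
    demo_app.config.from_pyfile("config.py")
    print(demo_app.config["DATABASE"])  # ./database.db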
{ "content_hash": "512f6c69babb718362c9b14834dce010", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 55, "avg_line_length": 25.88888888888889, "alnum_prop": 0.6437768240343348, "repo_name": "DCGM/EmotionService", "id": "ec21cc509674985de6ac0d354cd98d30fda17501", "size": "233", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "service/config.py", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "HTML", "bytes": "4437" }, { "name": "JavaScript", "bytes": "41023" }, { "name": "Python", "bytes": "92834" }, { "name": "Shell", "bytes": "9000" } ], "symlink_target": "" }
import os
import sys
import subprocess
from os import access, getenv, X_OK

jar_file = 'hops0.33.jar'

default_jvm_mem_opts = ['-Xms1g', '-Xmx2g']

# !!! End of parameter section. No user-serviceable code below this line !!!


def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    return os.path.dirname(os.path.realpath(path))


def java_executable():
    """Return the executable name of the Java interpreter."""
    java_home = getenv('JAVA_HOME')
    java_bin = os.path.join('bin', 'java')

    if java_home and access(os.path.join(java_home, java_bin), X_OK):
        return os.path.join(java_home, java_bin)
    else:
        return 'java'


def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 3-tuple of lists of strings of the form:
      (memory_options, prop_options, passthrough_options)
    """
    mem_opts = []
    prop_opts = []
    pass_args = []

    for arg in argv:
        if arg.startswith('-D'):
            prop_opts.append(arg)
        elif arg.startswith('-XX'):
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            mem_opts.append(arg)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of that shell fragment it is important to
    # check specifically for None in the second condition, so that an empty
    # (but set) _JAVA_OPTIONS value still suppresses the default memory options.
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args)


def main():
    java = java_executable()
    jar_dir = real_dirname(sys.argv[0])
    (mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])

    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'

    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args

    if '--jar_dir' in sys.argv[1:]:
        print(jar_path)
    else:
        sys.exit(subprocess.call(java_args))


if __name__ == '__main__':
    main()
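# --- Added illustrative check (not part of the wrapper script). ---
# It demonstrates how jvm_opts() above partitions an argument list; the
# values are arbitrary examples. Call _demo_jvm_opts() from an interactive
# session or a test to run it.
def _demo_jvm_opts():
    mem, props, rest = jvm_opts(['-Xmx4g', '-Dconfig.file=run.conf', 'sample_input'])
    assert mem == ['-Xmx4g']                   # explicit -Xm* flags suppress the 1g/2g defaults
    assert props == ['-Dconfig.file=run.conf']
    assert rest == ['sample_input']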
{ "content_hash": "42223a0f98c781286b95ff0184388307", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 81, "avg_line_length": 28.950617283950617, "alnum_prop": 0.6132196162046908, "repo_name": "Luobiny/bioconda-recipes", "id": "fa77e3c82a73284ab0d80fa3b63015d0cf49c08c", "size": "2649", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "recipes/hops/hops.py", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "154" }, { "name": "CMake", "bytes": "13967" }, { "name": "M4", "bytes": "726" }, { "name": "Perl", "bytes": "99771" }, { "name": "Prolog", "bytes": "1067" }, { "name": "Python", "bytes": "393551" }, { "name": "Raku", "bytes": "23942" }, { "name": "Roff", "bytes": "996" }, { "name": "Shell", "bytes": "3972568" } ], "symlink_target": "" }
from ._models_py3 import AadAuthenticationParameters from ._models_py3 import AddressSpace from ._models_py3 import ApplicationGateway from ._models_py3 import ApplicationGatewayAuthenticationCertificate from ._models_py3 import ApplicationGatewayAutoscaleConfiguration from ._models_py3 import ApplicationGatewayAvailableSslOptions from ._models_py3 import ApplicationGatewayAvailableSslPredefinedPolicies from ._models_py3 import ApplicationGatewayAvailableWafRuleSetsResult from ._models_py3 import ApplicationGatewayBackendAddress from ._models_py3 import ApplicationGatewayBackendAddressPool from ._models_py3 import ApplicationGatewayBackendHealth from ._models_py3 import ApplicationGatewayBackendHealthHttpSettings from ._models_py3 import ApplicationGatewayBackendHealthOnDemand from ._models_py3 import ApplicationGatewayBackendHealthPool from ._models_py3 import ApplicationGatewayBackendHealthServer from ._models_py3 import ApplicationGatewayBackendHttpSettings from ._models_py3 import ApplicationGatewayConnectionDraining from ._models_py3 import ApplicationGatewayCustomError from ._models_py3 import ApplicationGatewayFirewallDisabledRuleGroup from ._models_py3 import ApplicationGatewayFirewallExclusion from ._models_py3 import ApplicationGatewayFirewallRule from ._models_py3 import ApplicationGatewayFirewallRuleGroup from ._models_py3 import ApplicationGatewayFirewallRuleSet from ._models_py3 import ApplicationGatewayFrontendIPConfiguration from ._models_py3 import ApplicationGatewayFrontendPort from ._models_py3 import ApplicationGatewayHeaderConfiguration from ._models_py3 import ApplicationGatewayHttpListener from ._models_py3 import ApplicationGatewayIPConfiguration from ._models_py3 import ApplicationGatewayListResult from ._models_py3 import ApplicationGatewayOnDemandProbe from ._models_py3 import ApplicationGatewayPathRule from ._models_py3 import ApplicationGatewayPrivateEndpointConnection from ._models_py3 import ApplicationGatewayPrivateEndpointConnectionListResult from ._models_py3 import ApplicationGatewayPrivateLinkConfiguration from ._models_py3 import ApplicationGatewayPrivateLinkIpConfiguration from ._models_py3 import ApplicationGatewayPrivateLinkResource from ._models_py3 import ApplicationGatewayPrivateLinkResourceListResult from ._models_py3 import ApplicationGatewayProbe from ._models_py3 import ApplicationGatewayProbeHealthResponseMatch from ._models_py3 import ApplicationGatewayRedirectConfiguration from ._models_py3 import ApplicationGatewayRequestRoutingRule from ._models_py3 import ApplicationGatewayRewriteRule from ._models_py3 import ApplicationGatewayRewriteRuleActionSet from ._models_py3 import ApplicationGatewayRewriteRuleCondition from ._models_py3 import ApplicationGatewayRewriteRuleSet from ._models_py3 import ApplicationGatewaySku from ._models_py3 import ApplicationGatewaySslCertificate from ._models_py3 import ApplicationGatewaySslPolicy from ._models_py3 import ApplicationGatewaySslPredefinedPolicy from ._models_py3 import ApplicationGatewayTrustedRootCertificate from ._models_py3 import ApplicationGatewayUrlConfiguration from ._models_py3 import ApplicationGatewayUrlPathMap from ._models_py3 import ApplicationGatewayWebApplicationFirewallConfiguration from ._models_py3 import ApplicationRule from ._models_py3 import ApplicationSecurityGroup from ._models_py3 import ApplicationSecurityGroupListResult from ._models_py3 import AuthorizationListResult from ._models_py3 import AutoApprovedPrivateLinkService from ._models_py3 import 
AutoApprovedPrivateLinkServicesResult from ._models_py3 import Availability from ._models_py3 import AvailableDelegation from ._models_py3 import AvailableDelegationsResult from ._models_py3 import AvailablePrivateEndpointType from ._models_py3 import AvailablePrivateEndpointTypesResult from ._models_py3 import AvailableProvidersList from ._models_py3 import AvailableProvidersListCity from ._models_py3 import AvailableProvidersListCountry from ._models_py3 import AvailableProvidersListParameters from ._models_py3 import AvailableProvidersListState from ._models_py3 import AvailableServiceAlias from ._models_py3 import AvailableServiceAliasesResult from ._models_py3 import AzureAsyncOperationResult from ._models_py3 import AzureFirewall from ._models_py3 import AzureFirewallApplicationRule from ._models_py3 import AzureFirewallApplicationRuleCollection from ._models_py3 import AzureFirewallApplicationRuleProtocol from ._models_py3 import AzureFirewallFqdnTag from ._models_py3 import AzureFirewallFqdnTagListResult from ._models_py3 import AzureFirewallIPConfiguration from ._models_py3 import AzureFirewallIpGroups from ._models_py3 import AzureFirewallListResult from ._models_py3 import AzureFirewallNatRCAction from ._models_py3 import AzureFirewallNatRule from ._models_py3 import AzureFirewallNatRuleCollection from ._models_py3 import AzureFirewallNetworkRule from ._models_py3 import AzureFirewallNetworkRuleCollection from ._models_py3 import AzureFirewallPublicIPAddress from ._models_py3 import AzureFirewallRCAction from ._models_py3 import AzureFirewallSku from ._models_py3 import AzureReachabilityReport from ._models_py3 import AzureReachabilityReportItem from ._models_py3 import AzureReachabilityReportLatencyInfo from ._models_py3 import AzureReachabilityReportLocation from ._models_py3 import AzureReachabilityReportParameters from ._models_py3 import BGPCommunity from ._models_py3 import BackendAddressPool from ._models_py3 import BastionActiveSession from ._models_py3 import BastionActiveSessionListResult from ._models_py3 import BastionHost from ._models_py3 import BastionHostIPConfiguration from ._models_py3 import BastionHostListResult from ._models_py3 import BastionSessionDeleteResult from ._models_py3 import BastionSessionState from ._models_py3 import BastionShareableLink from ._models_py3 import BastionShareableLinkListRequest from ._models_py3 import BastionShareableLinkListResult from ._models_py3 import BgpConnection from ._models_py3 import BgpPeerStatus from ._models_py3 import BgpPeerStatusListResult from ._models_py3 import BgpServiceCommunity from ._models_py3 import BgpServiceCommunityListResult from ._models_py3 import BgpSettings from ._models_py3 import BreakOutCategoryPolicies from ._models_py3 import CheckPrivateLinkServiceVisibilityRequest from ._models_py3 import CloudErrorBody from ._models_py3 import ( Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties, ) from ._models_py3 import ConnectionMonitor from ._models_py3 import ConnectionMonitorDestination from ._models_py3 import ConnectionMonitorEndpoint from ._models_py3 import ConnectionMonitorEndpointFilter from ._models_py3 import ConnectionMonitorEndpointFilterItem from ._models_py3 import ConnectionMonitorHttpConfiguration from ._models_py3 import ConnectionMonitorIcmpConfiguration from ._models_py3 import ConnectionMonitorListResult from ._models_py3 import ConnectionMonitorOutput from ._models_py3 import ConnectionMonitorParameters from ._models_py3 import 
ConnectionMonitorQueryResult from ._models_py3 import ConnectionMonitorResult from ._models_py3 import ConnectionMonitorResultProperties from ._models_py3 import ConnectionMonitorSource from ._models_py3 import ConnectionMonitorSuccessThreshold from ._models_py3 import ConnectionMonitorTcpConfiguration from ._models_py3 import ConnectionMonitorTestConfiguration from ._models_py3 import ConnectionMonitorTestGroup from ._models_py3 import ConnectionMonitorWorkspaceSettings from ._models_py3 import ConnectionResetSharedKey from ._models_py3 import ConnectionSharedKey from ._models_py3 import ConnectionStateSnapshot from ._models_py3 import ConnectivityDestination from ._models_py3 import ConnectivityHop from ._models_py3 import ConnectivityInformation from ._models_py3 import ConnectivityIssue from ._models_py3 import ConnectivityParameters from ._models_py3 import ConnectivitySource from ._models_py3 import Container from ._models_py3 import ContainerNetworkInterface from ._models_py3 import ContainerNetworkInterfaceConfiguration from ._models_py3 import ContainerNetworkInterfaceIpConfiguration from ._models_py3 import CustomDnsConfigPropertiesFormat from ._models_py3 import DdosCustomPolicy from ._models_py3 import DdosProtectionPlan from ._models_py3 import DdosProtectionPlanListResult from ._models_py3 import DdosSettings from ._models_py3 import Delegation from ._models_py3 import DeviceProperties from ._models_py3 import DhcpOptions from ._models_py3 import Dimension from ._models_py3 import DnsNameAvailabilityResult from ._models_py3 import DnsSettings from ._models_py3 import EffectiveNetworkSecurityGroup from ._models_py3 import EffectiveNetworkSecurityGroupAssociation from ._models_py3 import EffectiveNetworkSecurityGroupListResult from ._models_py3 import EffectiveNetworkSecurityRule from ._models_py3 import EffectiveRoute from ._models_py3 import EffectiveRouteListResult from ._models_py3 import EffectiveRoutesParameters from ._models_py3 import EndpointServiceResult from ._models_py3 import EndpointServicesListResult from ._models_py3 import Error from ._models_py3 import ErrorDetails from ._models_py3 import ErrorResponse from ._models_py3 import EvaluatedNetworkSecurityGroup from ._models_py3 import ExpressRouteCircuit from ._models_py3 import ExpressRouteCircuitArpTable from ._models_py3 import ExpressRouteCircuitAuthorization from ._models_py3 import ExpressRouteCircuitConnection from ._models_py3 import ExpressRouteCircuitConnectionListResult from ._models_py3 import ExpressRouteCircuitListResult from ._models_py3 import ExpressRouteCircuitPeering from ._models_py3 import ExpressRouteCircuitPeeringConfig from ._models_py3 import ExpressRouteCircuitPeeringId from ._models_py3 import ExpressRouteCircuitPeeringListResult from ._models_py3 import ExpressRouteCircuitReference from ._models_py3 import ExpressRouteCircuitRoutesTable from ._models_py3 import ExpressRouteCircuitRoutesTableSummary from ._models_py3 import ExpressRouteCircuitServiceProviderProperties from ._models_py3 import ExpressRouteCircuitSku from ._models_py3 import ExpressRouteCircuitStats from ._models_py3 import ExpressRouteCircuitsArpTableListResult from ._models_py3 import ExpressRouteCircuitsRoutesTableListResult from ._models_py3 import ExpressRouteCircuitsRoutesTableSummaryListResult from ._models_py3 import ExpressRouteConnection from ._models_py3 import ExpressRouteConnectionId from ._models_py3 import ExpressRouteConnectionList from ._models_py3 import ExpressRouteCrossConnection from ._models_py3 
import ExpressRouteCrossConnectionListResult from ._models_py3 import ExpressRouteCrossConnectionPeering from ._models_py3 import ExpressRouteCrossConnectionPeeringList from ._models_py3 import ExpressRouteCrossConnectionRoutesTableSummary from ._models_py3 import ExpressRouteCrossConnectionsRoutesTableSummaryListResult from ._models_py3 import ExpressRouteGateway from ._models_py3 import ExpressRouteGatewayList from ._models_py3 import ExpressRouteGatewayPropertiesAutoScaleConfiguration from ._models_py3 import ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds from ._models_py3 import ExpressRouteLink from ._models_py3 import ExpressRouteLinkListResult from ._models_py3 import ExpressRouteLinkMacSecConfig from ._models_py3 import ExpressRoutePort from ._models_py3 import ExpressRoutePortListResult from ._models_py3 import ExpressRoutePortsLocation from ._models_py3 import ExpressRoutePortsLocationBandwidths from ._models_py3 import ExpressRoutePortsLocationListResult from ._models_py3 import ExpressRouteServiceProvider from ._models_py3 import ExpressRouteServiceProviderBandwidthsOffered from ._models_py3 import ExpressRouteServiceProviderListResult from ._models_py3 import FirewallPolicy from ._models_py3 import FirewallPolicyFilterRuleCollection from ._models_py3 import FirewallPolicyFilterRuleCollectionAction from ._models_py3 import FirewallPolicyListResult from ._models_py3 import FirewallPolicyNatRuleCollection from ._models_py3 import FirewallPolicyNatRuleCollectionAction from ._models_py3 import FirewallPolicyRule from ._models_py3 import FirewallPolicyRuleApplicationProtocol from ._models_py3 import FirewallPolicyRuleCollection from ._models_py3 import FirewallPolicyRuleCollectionGroup from ._models_py3 import FirewallPolicyRuleCollectionGroupListResult from ._models_py3 import FirewallPolicyThreatIntelWhitelist from ._models_py3 import FlowLog from ._models_py3 import FlowLogFormatParameters from ._models_py3 import FlowLogInformation from ._models_py3 import FlowLogListResult from ._models_py3 import FlowLogStatusParameters from ._models_py3 import FrontendIPConfiguration from ._models_py3 import GatewayRoute from ._models_py3 import GatewayRouteListResult from ._models_py3 import GetVpnSitesConfigurationRequest from ._models_py3 import HTTPConfiguration from ._models_py3 import HTTPHeader from ._models_py3 import HopLink from ._models_py3 import HubIPAddresses from ._models_py3 import HubIpConfiguration from ._models_py3 import HubPublicIPAddresses from ._models_py3 import HubRoute from ._models_py3 import HubRouteTable from ._models_py3 import HubVirtualNetworkConnection from ._models_py3 import IPAddressAvailabilityResult from ._models_py3 import IPConfiguration from ._models_py3 import IPConfigurationBgpPeeringAddress from ._models_py3 import IPConfigurationProfile from ._models_py3 import InboundNatPool from ._models_py3 import InboundNatRule from ._models_py3 import InboundNatRuleListResult from ._models_py3 import IpAllocation from ._models_py3 import IpAllocationListResult from ._models_py3 import IpGroup from ._models_py3 import IpGroupListResult from ._models_py3 import IpTag from ._models_py3 import IpsecPolicy from ._models_py3 import Ipv6CircuitConnectionConfig from ._models_py3 import Ipv6ExpressRouteCircuitPeeringConfig from ._models_py3 import ListHubRouteTablesResult from ._models_py3 import ListHubVirtualNetworkConnectionsResult from ._models_py3 import ListP2SVpnGatewaysResult from ._models_py3 import ListVirtualHubBgpConnectionResults from 
._models_py3 import ListVirtualHubIpConfigurationResults from ._models_py3 import ListVirtualHubRouteTableV2SResult from ._models_py3 import ListVirtualHubsResult from ._models_py3 import ListVirtualWANsResult from ._models_py3 import ListVpnConnectionsResult from ._models_py3 import ListVpnGatewaysResult from ._models_py3 import ListVpnServerConfigurationsResult from ._models_py3 import ListVpnSiteLinkConnectionsResult from ._models_py3 import ListVpnSiteLinksResult from ._models_py3 import ListVpnSitesResult from ._models_py3 import LoadBalancer from ._models_py3 import LoadBalancerBackendAddress from ._models_py3 import LoadBalancerBackendAddressPoolListResult from ._models_py3 import LoadBalancerFrontendIPConfigurationListResult from ._models_py3 import LoadBalancerListResult from ._models_py3 import LoadBalancerLoadBalancingRuleListResult from ._models_py3 import LoadBalancerOutboundRuleListResult from ._models_py3 import LoadBalancerProbeListResult from ._models_py3 import LoadBalancerSku from ._models_py3 import LoadBalancingRule from ._models_py3 import LocalNetworkGateway from ._models_py3 import LocalNetworkGatewayListResult from ._models_py3 import LogSpecification from ._models_py3 import ManagedRuleGroupOverride from ._models_py3 import ManagedRuleOverride from ._models_py3 import ManagedRuleSet from ._models_py3 import ManagedRulesDefinition from ._models_py3 import ManagedServiceIdentity from ._models_py3 import MatchCondition from ._models_py3 import MatchVariable from ._models_py3 import MatchedRule from ._models_py3 import MetricSpecification from ._models_py3 import NatGateway from ._models_py3 import NatGatewayListResult from ._models_py3 import NatGatewaySku from ._models_py3 import NatRule from ._models_py3 import NetworkConfigurationDiagnosticParameters from ._models_py3 import NetworkConfigurationDiagnosticProfile from ._models_py3 import NetworkConfigurationDiagnosticResponse from ._models_py3 import NetworkConfigurationDiagnosticResult from ._models_py3 import NetworkIntentPolicy from ._models_py3 import NetworkIntentPolicyConfiguration from ._models_py3 import NetworkInterface from ._models_py3 import NetworkInterfaceAssociation from ._models_py3 import NetworkInterfaceDnsSettings from ._models_py3 import NetworkInterfaceIPConfiguration from ._models_py3 import NetworkInterfaceIPConfigurationListResult from ._models_py3 import NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties from ._models_py3 import NetworkInterfaceListResult from ._models_py3 import NetworkInterfaceLoadBalancerListResult from ._models_py3 import NetworkInterfaceTapConfiguration from ._models_py3 import NetworkInterfaceTapConfigurationListResult from ._models_py3 import NetworkProfile from ._models_py3 import NetworkProfileListResult from ._models_py3 import NetworkRule from ._models_py3 import NetworkSecurityGroup from ._models_py3 import NetworkSecurityGroupListResult from ._models_py3 import NetworkSecurityGroupResult from ._models_py3 import NetworkSecurityRulesEvaluationResult from ._models_py3 import NetworkVirtualAppliance from ._models_py3 import NetworkVirtualApplianceListResult from ._models_py3 import NetworkVirtualApplianceSiteListResult from ._models_py3 import NetworkVirtualApplianceSku from ._models_py3 import NetworkVirtualApplianceSkuInstances from ._models_py3 import NetworkVirtualApplianceSkuListResult from ._models_py3 import NetworkWatcher from ._models_py3 import NetworkWatcherListResult from ._models_py3 import NextHopParameters from ._models_py3 import 
NextHopResult from ._models_py3 import Office365PolicyProperties from ._models_py3 import Operation from ._models_py3 import OperationDisplay from ._models_py3 import OperationListResult from ._models_py3 import OperationPropertiesFormatServiceSpecification from ._models_py3 import OutboundRule from ._models_py3 import OwaspCrsExclusionEntry from ._models_py3 import P2SConnectionConfiguration from ._models_py3 import P2SVpnConnectionHealth from ._models_py3 import P2SVpnConnectionHealthRequest from ._models_py3 import P2SVpnConnectionRequest from ._models_py3 import P2SVpnGateway from ._models_py3 import P2SVpnProfileParameters from ._models_py3 import PacketCapture from ._models_py3 import PacketCaptureFilter from ._models_py3 import PacketCaptureListResult from ._models_py3 import PacketCaptureParameters from ._models_py3 import PacketCaptureQueryStatusResult from ._models_py3 import PacketCaptureResult from ._models_py3 import PacketCaptureResultProperties from ._models_py3 import PacketCaptureStorageLocation from ._models_py3 import PatchRouteFilter from ._models_py3 import PatchRouteFilterRule from ._models_py3 import PeerExpressRouteCircuitConnection from ._models_py3 import PeerExpressRouteCircuitConnectionListResult from ._models_py3 import PolicySettings from ._models_py3 import PrepareNetworkPoliciesRequest from ._models_py3 import PrivateDnsZoneConfig from ._models_py3 import PrivateDnsZoneGroup from ._models_py3 import PrivateDnsZoneGroupListResult from ._models_py3 import PrivateEndpoint from ._models_py3 import PrivateEndpointConnection from ._models_py3 import PrivateEndpointConnectionListResult from ._models_py3 import PrivateEndpointListResult from ._models_py3 import PrivateLinkService from ._models_py3 import PrivateLinkServiceConnection from ._models_py3 import PrivateLinkServiceConnectionState from ._models_py3 import PrivateLinkServiceIpConfiguration from ._models_py3 import PrivateLinkServiceListResult from ._models_py3 import PrivateLinkServicePropertiesAutoApproval from ._models_py3 import PrivateLinkServicePropertiesVisibility from ._models_py3 import PrivateLinkServiceVisibility from ._models_py3 import Probe from ._models_py3 import PropagatedRouteTable from ._models_py3 import ProtocolConfiguration from ._models_py3 import ProtocolCustomSettingsFormat from ._models_py3 import PublicIPAddress from ._models_py3 import PublicIPAddressDnsSettings from ._models_py3 import PublicIPAddressListResult from ._models_py3 import PublicIPAddressSku from ._models_py3 import PublicIPPrefix from ._models_py3 import PublicIPPrefixListResult from ._models_py3 import PublicIPPrefixSku from ._models_py3 import QueryTroubleshootingParameters from ._models_py3 import RadiusServer from ._models_py3 import RecordSet from ._models_py3 import ReferencedPublicIpAddress from ._models_py3 import Resource from ._models_py3 import ResourceNavigationLink from ._models_py3 import ResourceNavigationLinksListResult from ._models_py3 import ResourceSet from ._models_py3 import RetentionPolicyParameters from ._models_py3 import Route from ._models_py3 import RouteFilter from ._models_py3 import RouteFilterListResult from ._models_py3 import RouteFilterRule from ._models_py3 import RouteFilterRuleListResult from ._models_py3 import RouteListResult from ._models_py3 import RouteTable from ._models_py3 import RouteTableListResult from ._models_py3 import RoutingConfiguration from ._models_py3 import SecurityGroupNetworkInterface from ._models_py3 import SecurityGroupViewParameters from ._models_py3 
import SecurityGroupViewResult from ._models_py3 import SecurityPartnerProvider from ._models_py3 import SecurityPartnerProviderListResult from ._models_py3 import SecurityRule from ._models_py3 import SecurityRuleAssociations from ._models_py3 import SecurityRuleListResult from ._models_py3 import ServiceAssociationLink from ._models_py3 import ServiceAssociationLinksListResult from ._models_py3 import ServiceEndpointPolicy from ._models_py3 import ServiceEndpointPolicyDefinition from ._models_py3 import ServiceEndpointPolicyDefinitionListResult from ._models_py3 import ServiceEndpointPolicyListResult from ._models_py3 import ServiceEndpointPropertiesFormat from ._models_py3 import ServiceTagInformation from ._models_py3 import ServiceTagInformationPropertiesFormat from ._models_py3 import ServiceTagsListResult from ._models_py3 import SessionIds from ._models_py3 import StaticRoute from ._models_py3 import SubResource from ._models_py3 import Subnet from ._models_py3 import SubnetAssociation from ._models_py3 import SubnetListResult from ._models_py3 import TagsObject from ._models_py3 import Topology from ._models_py3 import TopologyAssociation from ._models_py3 import TopologyParameters from ._models_py3 import TopologyResource from ._models_py3 import TrafficAnalyticsConfigurationProperties from ._models_py3 import TrafficAnalyticsProperties from ._models_py3 import TrafficSelectorPolicy from ._models_py3 import TroubleshootingDetails from ._models_py3 import TroubleshootingParameters from ._models_py3 import TroubleshootingRecommendedActions from ._models_py3 import TroubleshootingResult from ._models_py3 import TunnelConnectionHealth from ._models_py3 import UnprepareNetworkPoliciesRequest from ._models_py3 import Usage from ._models_py3 import UsageName from ._models_py3 import UsagesListResult from ._models_py3 import VM from ._models_py3 import VerificationIPFlowParameters from ._models_py3 import VerificationIPFlowResult from ._models_py3 import VirtualApplianceNicProperties from ._models_py3 import VirtualApplianceSite from ._models_py3 import VirtualApplianceSkuProperties from ._models_py3 import VirtualHub from ._models_py3 import VirtualHubEffectiveRoute from ._models_py3 import VirtualHubEffectiveRouteList from ._models_py3 import VirtualHubId from ._models_py3 import VirtualHubRoute from ._models_py3 import VirtualHubRouteTable from ._models_py3 import VirtualHubRouteTableV2 from ._models_py3 import VirtualHubRouteV2 from ._models_py3 import VirtualNetwork from ._models_py3 import VirtualNetworkBgpCommunities from ._models_py3 import VirtualNetworkConnectionGatewayReference from ._models_py3 import VirtualNetworkGateway from ._models_py3 import VirtualNetworkGatewayConnection from ._models_py3 import VirtualNetworkGatewayConnectionListEntity from ._models_py3 import VirtualNetworkGatewayConnectionListResult from ._models_py3 import VirtualNetworkGatewayIPConfiguration from ._models_py3 import VirtualNetworkGatewayListConnectionsResult from ._models_py3 import VirtualNetworkGatewayListResult from ._models_py3 import VirtualNetworkGatewaySku from ._models_py3 import VirtualNetworkListResult from ._models_py3 import VirtualNetworkListUsageResult from ._models_py3 import VirtualNetworkPeering from ._models_py3 import VirtualNetworkPeeringListResult from ._models_py3 import VirtualNetworkTap from ._models_py3 import VirtualNetworkTapListResult from ._models_py3 import VirtualNetworkUsage from ._models_py3 import VirtualNetworkUsageName from ._models_py3 import VirtualRouter 
from ._models_py3 import VirtualRouterListResult from ._models_py3 import VirtualRouterPeering from ._models_py3 import VirtualRouterPeeringListResult from ._models_py3 import VirtualWAN from ._models_py3 import VirtualWanSecurityProvider from ._models_py3 import VirtualWanSecurityProviders from ._models_py3 import VirtualWanVpnProfileParameters from ._models_py3 import VnetRoute from ._models_py3 import VpnClientConfiguration from ._models_py3 import VpnClientConnectionHealth from ._models_py3 import VpnClientConnectionHealthDetail from ._models_py3 import VpnClientConnectionHealthDetailListResult from ._models_py3 import VpnClientIPsecParameters from ._models_py3 import VpnClientParameters from ._models_py3 import VpnClientRevokedCertificate from ._models_py3 import VpnClientRootCertificate from ._models_py3 import VpnConnection from ._models_py3 import VpnDeviceScriptParameters from ._models_py3 import VpnGateway from ._models_py3 import VpnLinkBgpSettings from ._models_py3 import VpnLinkProviderProperties from ._models_py3 import VpnPacketCaptureStartParameters from ._models_py3 import VpnPacketCaptureStopParameters from ._models_py3 import VpnProfileResponse from ._models_py3 import VpnServerConfigRadiusClientRootCertificate from ._models_py3 import VpnServerConfigRadiusServerRootCertificate from ._models_py3 import VpnServerConfigVpnClientRevokedCertificate from ._models_py3 import VpnServerConfigVpnClientRootCertificate from ._models_py3 import VpnServerConfiguration from ._models_py3 import VpnServerConfigurationsResponse from ._models_py3 import VpnSite from ._models_py3 import VpnSiteId from ._models_py3 import VpnSiteLink from ._models_py3 import VpnSiteLinkConnection from ._models_py3 import WebApplicationFirewallCustomRule from ._models_py3 import WebApplicationFirewallPolicy from ._models_py3 import WebApplicationFirewallPolicyListResult from ._network_management_client_enums import Access from ._network_management_client_enums import ApplicationGatewayBackendHealthServerHealth from ._network_management_client_enums import ApplicationGatewayCookieBasedAffinity from ._network_management_client_enums import ApplicationGatewayCustomErrorStatusCode from ._network_management_client_enums import ApplicationGatewayFirewallMode from ._network_management_client_enums import ApplicationGatewayOperationalState from ._network_management_client_enums import ApplicationGatewayProtocol from ._network_management_client_enums import ApplicationGatewayRedirectType from ._network_management_client_enums import ApplicationGatewayRequestRoutingRuleType from ._network_management_client_enums import ApplicationGatewaySkuName from ._network_management_client_enums import ApplicationGatewaySslCipherSuite from ._network_management_client_enums import ApplicationGatewaySslPolicyName from ._network_management_client_enums import ApplicationGatewaySslPolicyType from ._network_management_client_enums import ApplicationGatewaySslProtocol from ._network_management_client_enums import ApplicationGatewayTier from ._network_management_client_enums import AssociationType from ._network_management_client_enums import AuthenticationMethod from ._network_management_client_enums import AuthorizationUseStatus from ._network_management_client_enums import AzureFirewallApplicationRuleProtocolType from ._network_management_client_enums import AzureFirewallNatRCActionType from ._network_management_client_enums import AzureFirewallNetworkRuleProtocol from ._network_management_client_enums import AzureFirewallRCActionType 
from ._network_management_client_enums import AzureFirewallSkuName from ._network_management_client_enums import AzureFirewallSkuTier from ._network_management_client_enums import AzureFirewallThreatIntelMode from ._network_management_client_enums import BastionConnectProtocol from ._network_management_client_enums import BgpPeerState from ._network_management_client_enums import CircuitConnectionStatus from ._network_management_client_enums import ConnectionMonitorEndpointFilterItemType from ._network_management_client_enums import ConnectionMonitorEndpointFilterType from ._network_management_client_enums import ConnectionMonitorSourceStatus from ._network_management_client_enums import ConnectionMonitorTestConfigurationProtocol from ._network_management_client_enums import ConnectionMonitorType from ._network_management_client_enums import ConnectionState from ._network_management_client_enums import ConnectionStatus from ._network_management_client_enums import DdosCustomPolicyProtocol from ._network_management_client_enums import DdosCustomPolicyTriggerSensitivityOverride from ._network_management_client_enums import DdosSettingsProtectionCoverage from ._network_management_client_enums import DhGroup from ._network_management_client_enums import Direction from ._network_management_client_enums import EffectiveRouteSource from ._network_management_client_enums import EffectiveRouteState from ._network_management_client_enums import EffectiveSecurityRuleProtocol from ._network_management_client_enums import EvaluationState from ._network_management_client_enums import ExpressRouteCircuitPeeringAdvertisedPublicPrefixState from ._network_management_client_enums import ExpressRouteCircuitPeeringState from ._network_management_client_enums import ExpressRouteCircuitSkuFamily from ._network_management_client_enums import ExpressRouteCircuitSkuTier from ._network_management_client_enums import ExpressRouteLinkAdminState from ._network_management_client_enums import ExpressRouteLinkConnectorType from ._network_management_client_enums import ExpressRouteLinkMacSecCipher from ._network_management_client_enums import ExpressRoutePeeringState from ._network_management_client_enums import ExpressRoutePeeringType from ._network_management_client_enums import ExpressRoutePortsEncapsulation from ._network_management_client_enums import FirewallPolicyFilterRuleCollectionActionType from ._network_management_client_enums import FirewallPolicyNatRuleCollectionActionType from ._network_management_client_enums import FirewallPolicyRuleApplicationProtocolType from ._network_management_client_enums import FirewallPolicyRuleCollectionType from ._network_management_client_enums import FirewallPolicyRuleNetworkProtocol from ._network_management_client_enums import FirewallPolicyRuleType from ._network_management_client_enums import FlowLogFormatType from ._network_management_client_enums import HTTPConfigurationMethod from ._network_management_client_enums import HTTPMethod from ._network_management_client_enums import HubBgpConnectionStatus from ._network_management_client_enums import HubVirtualNetworkConnectionStatus from ._network_management_client_enums import IPAllocationMethod from ._network_management_client_enums import IPVersion from ._network_management_client_enums import IkeEncryption from ._network_management_client_enums import IkeIntegrity from ._network_management_client_enums import IpAllocationType from ._network_management_client_enums import IpFlowProtocol from 
._network_management_client_enums import IpsecEncryption from ._network_management_client_enums import IpsecIntegrity from ._network_management_client_enums import IssueType from ._network_management_client_enums import LoadBalancerOutboundRuleProtocol from ._network_management_client_enums import LoadBalancerSkuName from ._network_management_client_enums import LoadDistribution from ._network_management_client_enums import ManagedRuleEnabledState from ._network_management_client_enums import NatGatewaySkuName from ._network_management_client_enums import NetworkOperationStatus from ._network_management_client_enums import NextHopType from ._network_management_client_enums import OfficeTrafficCategory from ._network_management_client_enums import Origin from ._network_management_client_enums import OutputType from ._network_management_client_enums import OwaspCrsExclusionEntryMatchVariable from ._network_management_client_enums import OwaspCrsExclusionEntrySelectorMatchOperator from ._network_management_client_enums import PcError from ._network_management_client_enums import PcProtocol from ._network_management_client_enums import PcStatus from ._network_management_client_enums import PfsGroup from ._network_management_client_enums import PreferredIPVersion from ._network_management_client_enums import ProbeProtocol from ._network_management_client_enums import ProcessorArchitecture from ._network_management_client_enums import Protocol from ._network_management_client_enums import ProvisioningState from ._network_management_client_enums import PublicIPAddressSkuName from ._network_management_client_enums import PublicIPPrefixSkuName from ._network_management_client_enums import ResourceIdentityType from ._network_management_client_enums import RouteFilterRuleType from ._network_management_client_enums import RouteNextHopType from ._network_management_client_enums import RoutingState from ._network_management_client_enums import SecurityPartnerProviderConnectionStatus from ._network_management_client_enums import SecurityProviderName from ._network_management_client_enums import SecurityRuleAccess from ._network_management_client_enums import SecurityRuleDirection from ._network_management_client_enums import SecurityRuleProtocol from ._network_management_client_enums import ServiceProviderProvisioningState from ._network_management_client_enums import Severity from ._network_management_client_enums import TransportProtocol from ._network_management_client_enums import TunnelConnectionStatus from ._network_management_client_enums import UsageUnit from ._network_management_client_enums import VerbosityLevel from ._network_management_client_enums import VirtualNetworkGatewayConnectionProtocol from ._network_management_client_enums import VirtualNetworkGatewayConnectionStatus from ._network_management_client_enums import VirtualNetworkGatewayConnectionType from ._network_management_client_enums import VirtualNetworkGatewaySkuName from ._network_management_client_enums import VirtualNetworkGatewaySkuTier from ._network_management_client_enums import VirtualNetworkGatewayType from ._network_management_client_enums import VirtualNetworkPeeringState from ._network_management_client_enums import VirtualWanSecurityProviderType from ._network_management_client_enums import VpnAuthenticationType from ._network_management_client_enums import VpnClientProtocol from ._network_management_client_enums import VpnConnectionStatus from ._network_management_client_enums import VpnGatewayGeneration from 
._network_management_client_enums import VpnGatewayTunnelingProtocol from ._network_management_client_enums import VpnType from ._network_management_client_enums import WebApplicationFirewallAction from ._network_management_client_enums import WebApplicationFirewallEnabledState from ._network_management_client_enums import WebApplicationFirewallMatchVariable from ._network_management_client_enums import WebApplicationFirewallMode from ._network_management_client_enums import WebApplicationFirewallOperator from ._network_management_client_enums import WebApplicationFirewallPolicyResourceState from ._network_management_client_enums import WebApplicationFirewallRuleType from ._network_management_client_enums import WebApplicationFirewallTransform from ._patch import __all__ as _patch_all from ._patch import * # type: ignore # pylint: disable=unused-wildcard-import from ._patch import patch_sdk as _patch_sdk __all__ = [ "AadAuthenticationParameters", "AddressSpace", "ApplicationGateway", "ApplicationGatewayAuthenticationCertificate", "ApplicationGatewayAutoscaleConfiguration", "ApplicationGatewayAvailableSslOptions", "ApplicationGatewayAvailableSslPredefinedPolicies", "ApplicationGatewayAvailableWafRuleSetsResult", "ApplicationGatewayBackendAddress", "ApplicationGatewayBackendAddressPool", "ApplicationGatewayBackendHealth", "ApplicationGatewayBackendHealthHttpSettings", "ApplicationGatewayBackendHealthOnDemand", "ApplicationGatewayBackendHealthPool", "ApplicationGatewayBackendHealthServer", "ApplicationGatewayBackendHttpSettings", "ApplicationGatewayConnectionDraining", "ApplicationGatewayCustomError", "ApplicationGatewayFirewallDisabledRuleGroup", "ApplicationGatewayFirewallExclusion", "ApplicationGatewayFirewallRule", "ApplicationGatewayFirewallRuleGroup", "ApplicationGatewayFirewallRuleSet", "ApplicationGatewayFrontendIPConfiguration", "ApplicationGatewayFrontendPort", "ApplicationGatewayHeaderConfiguration", "ApplicationGatewayHttpListener", "ApplicationGatewayIPConfiguration", "ApplicationGatewayListResult", "ApplicationGatewayOnDemandProbe", "ApplicationGatewayPathRule", "ApplicationGatewayPrivateEndpointConnection", "ApplicationGatewayPrivateEndpointConnectionListResult", "ApplicationGatewayPrivateLinkConfiguration", "ApplicationGatewayPrivateLinkIpConfiguration", "ApplicationGatewayPrivateLinkResource", "ApplicationGatewayPrivateLinkResourceListResult", "ApplicationGatewayProbe", "ApplicationGatewayProbeHealthResponseMatch", "ApplicationGatewayRedirectConfiguration", "ApplicationGatewayRequestRoutingRule", "ApplicationGatewayRewriteRule", "ApplicationGatewayRewriteRuleActionSet", "ApplicationGatewayRewriteRuleCondition", "ApplicationGatewayRewriteRuleSet", "ApplicationGatewaySku", "ApplicationGatewaySslCertificate", "ApplicationGatewaySslPolicy", "ApplicationGatewaySslPredefinedPolicy", "ApplicationGatewayTrustedRootCertificate", "ApplicationGatewayUrlConfiguration", "ApplicationGatewayUrlPathMap", "ApplicationGatewayWebApplicationFirewallConfiguration", "ApplicationRule", "ApplicationSecurityGroup", "ApplicationSecurityGroupListResult", "AuthorizationListResult", "AutoApprovedPrivateLinkService", "AutoApprovedPrivateLinkServicesResult", "Availability", "AvailableDelegation", "AvailableDelegationsResult", "AvailablePrivateEndpointType", "AvailablePrivateEndpointTypesResult", "AvailableProvidersList", "AvailableProvidersListCity", "AvailableProvidersListCountry", "AvailableProvidersListParameters", "AvailableProvidersListState", "AvailableServiceAlias", "AvailableServiceAliasesResult", 
"AzureAsyncOperationResult", "AzureFirewall", "AzureFirewallApplicationRule", "AzureFirewallApplicationRuleCollection", "AzureFirewallApplicationRuleProtocol", "AzureFirewallFqdnTag", "AzureFirewallFqdnTagListResult", "AzureFirewallIPConfiguration", "AzureFirewallIpGroups", "AzureFirewallListResult", "AzureFirewallNatRCAction", "AzureFirewallNatRule", "AzureFirewallNatRuleCollection", "AzureFirewallNetworkRule", "AzureFirewallNetworkRuleCollection", "AzureFirewallPublicIPAddress", "AzureFirewallRCAction", "AzureFirewallSku", "AzureReachabilityReport", "AzureReachabilityReportItem", "AzureReachabilityReportLatencyInfo", "AzureReachabilityReportLocation", "AzureReachabilityReportParameters", "BGPCommunity", "BackendAddressPool", "BastionActiveSession", "BastionActiveSessionListResult", "BastionHost", "BastionHostIPConfiguration", "BastionHostListResult", "BastionSessionDeleteResult", "BastionSessionState", "BastionShareableLink", "BastionShareableLinkListRequest", "BastionShareableLinkListResult", "BgpConnection", "BgpPeerStatus", "BgpPeerStatusListResult", "BgpServiceCommunity", "BgpServiceCommunityListResult", "BgpSettings", "BreakOutCategoryPolicies", "CheckPrivateLinkServiceVisibilityRequest", "CloudErrorBody", "Components1Jq1T4ISchemasManagedserviceidentityPropertiesUserassignedidentitiesAdditionalproperties", "ConnectionMonitor", "ConnectionMonitorDestination", "ConnectionMonitorEndpoint", "ConnectionMonitorEndpointFilter", "ConnectionMonitorEndpointFilterItem", "ConnectionMonitorHttpConfiguration", "ConnectionMonitorIcmpConfiguration", "ConnectionMonitorListResult", "ConnectionMonitorOutput", "ConnectionMonitorParameters", "ConnectionMonitorQueryResult", "ConnectionMonitorResult", "ConnectionMonitorResultProperties", "ConnectionMonitorSource", "ConnectionMonitorSuccessThreshold", "ConnectionMonitorTcpConfiguration", "ConnectionMonitorTestConfiguration", "ConnectionMonitorTestGroup", "ConnectionMonitorWorkspaceSettings", "ConnectionResetSharedKey", "ConnectionSharedKey", "ConnectionStateSnapshot", "ConnectivityDestination", "ConnectivityHop", "ConnectivityInformation", "ConnectivityIssue", "ConnectivityParameters", "ConnectivitySource", "Container", "ContainerNetworkInterface", "ContainerNetworkInterfaceConfiguration", "ContainerNetworkInterfaceIpConfiguration", "CustomDnsConfigPropertiesFormat", "DdosCustomPolicy", "DdosProtectionPlan", "DdosProtectionPlanListResult", "DdosSettings", "Delegation", "DeviceProperties", "DhcpOptions", "Dimension", "DnsNameAvailabilityResult", "DnsSettings", "EffectiveNetworkSecurityGroup", "EffectiveNetworkSecurityGroupAssociation", "EffectiveNetworkSecurityGroupListResult", "EffectiveNetworkSecurityRule", "EffectiveRoute", "EffectiveRouteListResult", "EffectiveRoutesParameters", "EndpointServiceResult", "EndpointServicesListResult", "Error", "ErrorDetails", "ErrorResponse", "EvaluatedNetworkSecurityGroup", "ExpressRouteCircuit", "ExpressRouteCircuitArpTable", "ExpressRouteCircuitAuthorization", "ExpressRouteCircuitConnection", "ExpressRouteCircuitConnectionListResult", "ExpressRouteCircuitListResult", "ExpressRouteCircuitPeering", "ExpressRouteCircuitPeeringConfig", "ExpressRouteCircuitPeeringId", "ExpressRouteCircuitPeeringListResult", "ExpressRouteCircuitReference", "ExpressRouteCircuitRoutesTable", "ExpressRouteCircuitRoutesTableSummary", "ExpressRouteCircuitServiceProviderProperties", "ExpressRouteCircuitSku", "ExpressRouteCircuitStats", "ExpressRouteCircuitsArpTableListResult", "ExpressRouteCircuitsRoutesTableListResult", 
"ExpressRouteCircuitsRoutesTableSummaryListResult", "ExpressRouteConnection", "ExpressRouteConnectionId", "ExpressRouteConnectionList", "ExpressRouteCrossConnection", "ExpressRouteCrossConnectionListResult", "ExpressRouteCrossConnectionPeering", "ExpressRouteCrossConnectionPeeringList", "ExpressRouteCrossConnectionRoutesTableSummary", "ExpressRouteCrossConnectionsRoutesTableSummaryListResult", "ExpressRouteGateway", "ExpressRouteGatewayList", "ExpressRouteGatewayPropertiesAutoScaleConfiguration", "ExpressRouteGatewayPropertiesAutoScaleConfigurationBounds", "ExpressRouteLink", "ExpressRouteLinkListResult", "ExpressRouteLinkMacSecConfig", "ExpressRoutePort", "ExpressRoutePortListResult", "ExpressRoutePortsLocation", "ExpressRoutePortsLocationBandwidths", "ExpressRoutePortsLocationListResult", "ExpressRouteServiceProvider", "ExpressRouteServiceProviderBandwidthsOffered", "ExpressRouteServiceProviderListResult", "FirewallPolicy", "FirewallPolicyFilterRuleCollection", "FirewallPolicyFilterRuleCollectionAction", "FirewallPolicyListResult", "FirewallPolicyNatRuleCollection", "FirewallPolicyNatRuleCollectionAction", "FirewallPolicyRule", "FirewallPolicyRuleApplicationProtocol", "FirewallPolicyRuleCollection", "FirewallPolicyRuleCollectionGroup", "FirewallPolicyRuleCollectionGroupListResult", "FirewallPolicyThreatIntelWhitelist", "FlowLog", "FlowLogFormatParameters", "FlowLogInformation", "FlowLogListResult", "FlowLogStatusParameters", "FrontendIPConfiguration", "GatewayRoute", "GatewayRouteListResult", "GetVpnSitesConfigurationRequest", "HTTPConfiguration", "HTTPHeader", "HopLink", "HubIPAddresses", "HubIpConfiguration", "HubPublicIPAddresses", "HubRoute", "HubRouteTable", "HubVirtualNetworkConnection", "IPAddressAvailabilityResult", "IPConfiguration", "IPConfigurationBgpPeeringAddress", "IPConfigurationProfile", "InboundNatPool", "InboundNatRule", "InboundNatRuleListResult", "IpAllocation", "IpAllocationListResult", "IpGroup", "IpGroupListResult", "IpTag", "IpsecPolicy", "Ipv6CircuitConnectionConfig", "Ipv6ExpressRouteCircuitPeeringConfig", "ListHubRouteTablesResult", "ListHubVirtualNetworkConnectionsResult", "ListP2SVpnGatewaysResult", "ListVirtualHubBgpConnectionResults", "ListVirtualHubIpConfigurationResults", "ListVirtualHubRouteTableV2SResult", "ListVirtualHubsResult", "ListVirtualWANsResult", "ListVpnConnectionsResult", "ListVpnGatewaysResult", "ListVpnServerConfigurationsResult", "ListVpnSiteLinkConnectionsResult", "ListVpnSiteLinksResult", "ListVpnSitesResult", "LoadBalancer", "LoadBalancerBackendAddress", "LoadBalancerBackendAddressPoolListResult", "LoadBalancerFrontendIPConfigurationListResult", "LoadBalancerListResult", "LoadBalancerLoadBalancingRuleListResult", "LoadBalancerOutboundRuleListResult", "LoadBalancerProbeListResult", "LoadBalancerSku", "LoadBalancingRule", "LocalNetworkGateway", "LocalNetworkGatewayListResult", "LogSpecification", "ManagedRuleGroupOverride", "ManagedRuleOverride", "ManagedRuleSet", "ManagedRulesDefinition", "ManagedServiceIdentity", "MatchCondition", "MatchVariable", "MatchedRule", "MetricSpecification", "NatGateway", "NatGatewayListResult", "NatGatewaySku", "NatRule", "NetworkConfigurationDiagnosticParameters", "NetworkConfigurationDiagnosticProfile", "NetworkConfigurationDiagnosticResponse", "NetworkConfigurationDiagnosticResult", "NetworkIntentPolicy", "NetworkIntentPolicyConfiguration", "NetworkInterface", "NetworkInterfaceAssociation", "NetworkInterfaceDnsSettings", "NetworkInterfaceIPConfiguration", "NetworkInterfaceIPConfigurationListResult", 
"NetworkInterfaceIPConfigurationPrivateLinkConnectionProperties", "NetworkInterfaceListResult", "NetworkInterfaceLoadBalancerListResult", "NetworkInterfaceTapConfiguration", "NetworkInterfaceTapConfigurationListResult", "NetworkProfile", "NetworkProfileListResult", "NetworkRule", "NetworkSecurityGroup", "NetworkSecurityGroupListResult", "NetworkSecurityGroupResult", "NetworkSecurityRulesEvaluationResult", "NetworkVirtualAppliance", "NetworkVirtualApplianceListResult", "NetworkVirtualApplianceSiteListResult", "NetworkVirtualApplianceSku", "NetworkVirtualApplianceSkuInstances", "NetworkVirtualApplianceSkuListResult", "NetworkWatcher", "NetworkWatcherListResult", "NextHopParameters", "NextHopResult", "Office365PolicyProperties", "Operation", "OperationDisplay", "OperationListResult", "OperationPropertiesFormatServiceSpecification", "OutboundRule", "OwaspCrsExclusionEntry", "P2SConnectionConfiguration", "P2SVpnConnectionHealth", "P2SVpnConnectionHealthRequest", "P2SVpnConnectionRequest", "P2SVpnGateway", "P2SVpnProfileParameters", "PacketCapture", "PacketCaptureFilter", "PacketCaptureListResult", "PacketCaptureParameters", "PacketCaptureQueryStatusResult", "PacketCaptureResult", "PacketCaptureResultProperties", "PacketCaptureStorageLocation", "PatchRouteFilter", "PatchRouteFilterRule", "PeerExpressRouteCircuitConnection", "PeerExpressRouteCircuitConnectionListResult", "PolicySettings", "PrepareNetworkPoliciesRequest", "PrivateDnsZoneConfig", "PrivateDnsZoneGroup", "PrivateDnsZoneGroupListResult", "PrivateEndpoint", "PrivateEndpointConnection", "PrivateEndpointConnectionListResult", "PrivateEndpointListResult", "PrivateLinkService", "PrivateLinkServiceConnection", "PrivateLinkServiceConnectionState", "PrivateLinkServiceIpConfiguration", "PrivateLinkServiceListResult", "PrivateLinkServicePropertiesAutoApproval", "PrivateLinkServicePropertiesVisibility", "PrivateLinkServiceVisibility", "Probe", "PropagatedRouteTable", "ProtocolConfiguration", "ProtocolCustomSettingsFormat", "PublicIPAddress", "PublicIPAddressDnsSettings", "PublicIPAddressListResult", "PublicIPAddressSku", "PublicIPPrefix", "PublicIPPrefixListResult", "PublicIPPrefixSku", "QueryTroubleshootingParameters", "RadiusServer", "RecordSet", "ReferencedPublicIpAddress", "Resource", "ResourceNavigationLink", "ResourceNavigationLinksListResult", "ResourceSet", "RetentionPolicyParameters", "Route", "RouteFilter", "RouteFilterListResult", "RouteFilterRule", "RouteFilterRuleListResult", "RouteListResult", "RouteTable", "RouteTableListResult", "RoutingConfiguration", "SecurityGroupNetworkInterface", "SecurityGroupViewParameters", "SecurityGroupViewResult", "SecurityPartnerProvider", "SecurityPartnerProviderListResult", "SecurityRule", "SecurityRuleAssociations", "SecurityRuleListResult", "ServiceAssociationLink", "ServiceAssociationLinksListResult", "ServiceEndpointPolicy", "ServiceEndpointPolicyDefinition", "ServiceEndpointPolicyDefinitionListResult", "ServiceEndpointPolicyListResult", "ServiceEndpointPropertiesFormat", "ServiceTagInformation", "ServiceTagInformationPropertiesFormat", "ServiceTagsListResult", "SessionIds", "StaticRoute", "SubResource", "Subnet", "SubnetAssociation", "SubnetListResult", "TagsObject", "Topology", "TopologyAssociation", "TopologyParameters", "TopologyResource", "TrafficAnalyticsConfigurationProperties", "TrafficAnalyticsProperties", "TrafficSelectorPolicy", "TroubleshootingDetails", "TroubleshootingParameters", "TroubleshootingRecommendedActions", "TroubleshootingResult", "TunnelConnectionHealth", 
"UnprepareNetworkPoliciesRequest", "Usage", "UsageName", "UsagesListResult", "VM", "VerificationIPFlowParameters", "VerificationIPFlowResult", "VirtualApplianceNicProperties", "VirtualApplianceSite", "VirtualApplianceSkuProperties", "VirtualHub", "VirtualHubEffectiveRoute", "VirtualHubEffectiveRouteList", "VirtualHubId", "VirtualHubRoute", "VirtualHubRouteTable", "VirtualHubRouteTableV2", "VirtualHubRouteV2", "VirtualNetwork", "VirtualNetworkBgpCommunities", "VirtualNetworkConnectionGatewayReference", "VirtualNetworkGateway", "VirtualNetworkGatewayConnection", "VirtualNetworkGatewayConnectionListEntity", "VirtualNetworkGatewayConnectionListResult", "VirtualNetworkGatewayIPConfiguration", "VirtualNetworkGatewayListConnectionsResult", "VirtualNetworkGatewayListResult", "VirtualNetworkGatewaySku", "VirtualNetworkListResult", "VirtualNetworkListUsageResult", "VirtualNetworkPeering", "VirtualNetworkPeeringListResult", "VirtualNetworkTap", "VirtualNetworkTapListResult", "VirtualNetworkUsage", "VirtualNetworkUsageName", "VirtualRouter", "VirtualRouterListResult", "VirtualRouterPeering", "VirtualRouterPeeringListResult", "VirtualWAN", "VirtualWanSecurityProvider", "VirtualWanSecurityProviders", "VirtualWanVpnProfileParameters", "VnetRoute", "VpnClientConfiguration", "VpnClientConnectionHealth", "VpnClientConnectionHealthDetail", "VpnClientConnectionHealthDetailListResult", "VpnClientIPsecParameters", "VpnClientParameters", "VpnClientRevokedCertificate", "VpnClientRootCertificate", "VpnConnection", "VpnDeviceScriptParameters", "VpnGateway", "VpnLinkBgpSettings", "VpnLinkProviderProperties", "VpnPacketCaptureStartParameters", "VpnPacketCaptureStopParameters", "VpnProfileResponse", "VpnServerConfigRadiusClientRootCertificate", "VpnServerConfigRadiusServerRootCertificate", "VpnServerConfigVpnClientRevokedCertificate", "VpnServerConfigVpnClientRootCertificate", "VpnServerConfiguration", "VpnServerConfigurationsResponse", "VpnSite", "VpnSiteId", "VpnSiteLink", "VpnSiteLinkConnection", "WebApplicationFirewallCustomRule", "WebApplicationFirewallPolicy", "WebApplicationFirewallPolicyListResult", "Access", "ApplicationGatewayBackendHealthServerHealth", "ApplicationGatewayCookieBasedAffinity", "ApplicationGatewayCustomErrorStatusCode", "ApplicationGatewayFirewallMode", "ApplicationGatewayOperationalState", "ApplicationGatewayProtocol", "ApplicationGatewayRedirectType", "ApplicationGatewayRequestRoutingRuleType", "ApplicationGatewaySkuName", "ApplicationGatewaySslCipherSuite", "ApplicationGatewaySslPolicyName", "ApplicationGatewaySslPolicyType", "ApplicationGatewaySslProtocol", "ApplicationGatewayTier", "AssociationType", "AuthenticationMethod", "AuthorizationUseStatus", "AzureFirewallApplicationRuleProtocolType", "AzureFirewallNatRCActionType", "AzureFirewallNetworkRuleProtocol", "AzureFirewallRCActionType", "AzureFirewallSkuName", "AzureFirewallSkuTier", "AzureFirewallThreatIntelMode", "BastionConnectProtocol", "BgpPeerState", "CircuitConnectionStatus", "ConnectionMonitorEndpointFilterItemType", "ConnectionMonitorEndpointFilterType", "ConnectionMonitorSourceStatus", "ConnectionMonitorTestConfigurationProtocol", "ConnectionMonitorType", "ConnectionState", "ConnectionStatus", "DdosCustomPolicyProtocol", "DdosCustomPolicyTriggerSensitivityOverride", "DdosSettingsProtectionCoverage", "DhGroup", "Direction", "EffectiveRouteSource", "EffectiveRouteState", "EffectiveSecurityRuleProtocol", "EvaluationState", "ExpressRouteCircuitPeeringAdvertisedPublicPrefixState", "ExpressRouteCircuitPeeringState", 
"ExpressRouteCircuitSkuFamily", "ExpressRouteCircuitSkuTier", "ExpressRouteLinkAdminState", "ExpressRouteLinkConnectorType", "ExpressRouteLinkMacSecCipher", "ExpressRoutePeeringState", "ExpressRoutePeeringType", "ExpressRoutePortsEncapsulation", "FirewallPolicyFilterRuleCollectionActionType", "FirewallPolicyNatRuleCollectionActionType", "FirewallPolicyRuleApplicationProtocolType", "FirewallPolicyRuleCollectionType", "FirewallPolicyRuleNetworkProtocol", "FirewallPolicyRuleType", "FlowLogFormatType", "HTTPConfigurationMethod", "HTTPMethod", "HubBgpConnectionStatus", "HubVirtualNetworkConnectionStatus", "IPAllocationMethod", "IPVersion", "IkeEncryption", "IkeIntegrity", "IpAllocationType", "IpFlowProtocol", "IpsecEncryption", "IpsecIntegrity", "IssueType", "LoadBalancerOutboundRuleProtocol", "LoadBalancerSkuName", "LoadDistribution", "ManagedRuleEnabledState", "NatGatewaySkuName", "NetworkOperationStatus", "NextHopType", "OfficeTrafficCategory", "Origin", "OutputType", "OwaspCrsExclusionEntryMatchVariable", "OwaspCrsExclusionEntrySelectorMatchOperator", "PcError", "PcProtocol", "PcStatus", "PfsGroup", "PreferredIPVersion", "ProbeProtocol", "ProcessorArchitecture", "Protocol", "ProvisioningState", "PublicIPAddressSkuName", "PublicIPPrefixSkuName", "ResourceIdentityType", "RouteFilterRuleType", "RouteNextHopType", "RoutingState", "SecurityPartnerProviderConnectionStatus", "SecurityProviderName", "SecurityRuleAccess", "SecurityRuleDirection", "SecurityRuleProtocol", "ServiceProviderProvisioningState", "Severity", "TransportProtocol", "TunnelConnectionStatus", "UsageUnit", "VerbosityLevel", "VirtualNetworkGatewayConnectionProtocol", "VirtualNetworkGatewayConnectionStatus", "VirtualNetworkGatewayConnectionType", "VirtualNetworkGatewaySkuName", "VirtualNetworkGatewaySkuTier", "VirtualNetworkGatewayType", "VirtualNetworkPeeringState", "VirtualWanSecurityProviderType", "VpnAuthenticationType", "VpnClientProtocol", "VpnConnectionStatus", "VpnGatewayGeneration", "VpnGatewayTunnelingProtocol", "VpnType", "WebApplicationFirewallAction", "WebApplicationFirewallEnabledState", "WebApplicationFirewallMatchVariable", "WebApplicationFirewallMode", "WebApplicationFirewallOperator", "WebApplicationFirewallPolicyResourceState", "WebApplicationFirewallRuleType", "WebApplicationFirewallTransform", ] __all__.extend([p for p in _patch_all if p not in __all__]) _patch_sdk()
{ "content_hash": "6c55e1483225ace8d48452981f8dfb53", "timestamp": "", "source": "github", "line_count": 1311, "max_line_length": 105, "avg_line_length": 44.36079328756674, "alnum_prop": 0.8260570524614406, "repo_name": "Azure/azure-sdk-for-python", "id": "425b5433d9da4f1152e3b697fc84e60af9494f79", "size": "58625", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_05_01/models/__init__.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "1224" }, { "name": "Bicep", "bytes": "24196" }, { "name": "CSS", "bytes": "6089" }, { "name": "Dockerfile", "bytes": "4892" }, { "name": "HTML", "bytes": "12058" }, { "name": "JavaScript", "bytes": "8137" }, { "name": "Jinja", "bytes": "10377" }, { "name": "Jupyter Notebook", "bytes": "272022" }, { "name": "PowerShell", "bytes": "518535" }, { "name": "Python", "bytes": "715484989" }, { "name": "Shell", "bytes": "3631" } ], "symlink_target": "" }
""" Sahana Eden Person Registry Model @author: Dominic König <dominic[at]aidiq.com> @copyright: 2009-2012 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ __all__ = ["S3PersonEntity", "S3OrgAuthModel", "S3PersonModel", "S3GroupModel", "S3ContactModel", "S3PersonAddressModel", "S3PersonImageModel", "S3PersonIdentityModel", "S3SavedSearch", "S3PersonPresence", "S3PersonDescription", "pr_pentity_represent", "pr_person_represent", "pr_person_comment", "pr_contacts", "pr_rheader", "pr_update_affiliations", "pr_add_affiliation", "pr_remove_affiliation", "pr_get_pe_id", "pr_define_role", "pr_delete_role", "pr_add_to_role", "pr_remove_from_role", "pr_get_role_paths", "pr_get_role_branches", "pr_get_path", "pr_get_ancestors", "pr_get_descendants", "pr_rebuild_path", "pr_role_rebuild_path"] import gluon.contrib.simplejson as json from gluon import * from gluon.dal import Row from gluon.storage import Storage from gluon.sqlhtml import RadioWidget from ..s3 import * OU = 1 # role type which indicates hierarchy, see role_types OTHER_ROLE = 9 # ============================================================================= class S3PersonEntity(S3Model): """ Person Super-Entity """ names = ["pr_pentity", "pr_affiliation", "pr_role", "pr_role_types", "pr_role_id", "pr_pe_label", "pr_pe_types"] def model(self): db = current.db T = current.T s3 = current.response.s3 add_component = self.add_component configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table meta_fields= s3.meta_fields super_entity = self.super_entity super_key = self.super_key super_link = self.super_link YES = T("yes") #current.messages.YES NO = T("no") #current.messages.NO UNKNOWN_OPT = current.messages.UNKNOWN_OPT # --------------------------------------------------------------------- # Person Super-Entity # pe_types = Storage(pr_person = T("Person"), pr_group = T("Group"), org_organisation = T("Organization"), org_office = T("Office"), dvi_body = T("Body")) tablename = "pr_pentity" table = super_entity(tablename, "pe_id", pe_types, Field("type"), Field("pe_label", length=128)) # Search method pentity_search = S3PentitySearch(name = "pentity_search_simple", label = T("Name and/or ID"), comment = T(""), field = ["pe_label"]) pentity_search.pentity_represent = pr_pentity_represent # Resource configuration configure(tablename, editable=False, deletable=False, listadd=False, onaccept=self.pr_pentity_onaccept, search_method=pentity_search) # Reusable fields pr_pe_label = S3ReusableField("pe_label", length=128, 
label = T("ID Tag Number"), requires = IS_NULL_OR(IS_NOT_ONE_OF(db, "pr_pentity.pe_label"))) # Components pe_id = super_key(table) add_component("pr_contact_emergency", pr_pentity=pe_id) add_component("pr_address", pr_pentity=pe_id) add_component("pr_image", pr_pentity=pe_id) add_component("pr_contact", pr_pentity=pe_id) add_component("pr_note", pr_pentity=pe_id) add_component("pr_physical_description", pr_pentity=dict(joinby=pe_id, multiple=False)) add_component("dvi_identification", pr_pentity=dict(joinby=pe_id, multiple=False)) add_component("dvi_effects", pr_pentity=dict(joinby=pe_id, multiple=False)) add_component("dvi_checklist", pr_pentity=dict(joinby=pe_id, multiple=False)) # Map Configs # - Personalised configurations # - OU configurations (Organisation/Branch/Facility/Team) add_component("gis_config", pr_pentity=dict(joinby=pe_id, multiple=False)) # --------------------------------------------------------------------- # Person <-> User # utable = current.auth.settings.table_user tablename = "pr_person_user" table = define_table(tablename, super_link("pe_id", "pr_pentity"), Field("user_id", utable), *meta_fields()) # --------------------------------------------------------------------- # Role (Affiliates Group) # role_types = { 1:T("Organizational Units"), # business hierarchy (reporting units) 2:T("Membership"), # membership role 3:T("Association"), # other non-reporting role 9:T("Other") # other role type } tablename = "pr_role" table = define_table(tablename, # The "parent" entity super_link("pe_id", "pr_pentity", label=T("Corporate Entity"), readable=True, writable=True), # Role type Field("role_type", "integer", requires = IS_IN_SET(role_types, zero=None), represent = lambda opt: \ role_types.get(opt, UNKNOWN_OPT)), # Role name Field("role", notnull=True), # Path, for faster lookups Field("path", readable = False, writable = False), # Type filter, type of entities which can have this role Field("entity_type", "string", requires = IS_EMPTY_OR(IS_IN_SET(pe_types, zero=T("ANY"))), represent = lambda opt: pe_types.get(opt, UNKNOWN_OPT)), # Subtype filter, if the entity type defines its own type Field("sub_type", "integer", readable = False, writable = False), *meta_fields()) # Field configuration table.pe_id.requires = IS_ONE_OF(db, "pr_pentity.pe_id", pr_pentity_represent, sort=True) table.pe_id.represent = pr_pentity_represent # CRUD Strings crud_strings[tablename] = Storage( title_create = T("Add Role"), title_display = T("Role Details"), title_list = T("Roles"), title_update = T("Edit Role"), title_search = T("Search Roles"), subtitle_create = T("Add New Role"), subtitle_list = T("List of Roles"), label_list_button = T("List Roles"), label_create_button = T("Add Role"), label_delete_button = T("Delete Role"), msg_record_created = T("Role added"), msg_record_modified = T("Role updated"), msg_record_deleted = T("Role deleted"), msg_list_empty = T("No Roles defined")) # Resource configuration configure(tablename, onvalidation=self.pr_role_onvalidation) # Reusable fields role_id = S3ReusableField("role_id", db.pr_role, requires = IS_ONE_OF(db, "pr_role.id", self.pr_role_represent), represent = self.pr_role_represent, label = T("Role"), ondelete = "CASCADE") # --------------------------------------------------------------------- # Affiliation # tablename = "pr_affiliation" table = define_table(tablename, role_id(), super_link("pe_id", "pr_pentity", label=T("Entity"), readable=True, writable=True), *meta_fields()) table.pe_id.requires = IS_ONE_OF(db, "pr_pentity.pe_id", 
pr_pentity_represent, sort=True) table.pe_id.represent = pr_pentity_represent # CRUD Strings crud_strings[tablename] = Storage( title_create = T("Add Affiliation"), title_display = T("Affiliation Details"), title_list = T("Affiliations"), title_update = T("Edit Affiliation"), title_search = T("Search Affiliations"), subtitle_create = T("Add New Affiliation"), subtitle_list = T("List of Affiliations"), label_list_button = T("List Affiliations"), label_create_button = T("Add Affiliation"), label_delete_button = T("Delete Affiliation"), msg_record_created = T("Affiliation added"), msg_record_modified = T("Affiliation updated"), msg_record_deleted = T("Affiliation deleted"), msg_list_empty = T("No Affiliations defined")) # Resource configuration configure(tablename, onaccept=self.pr_affiliation_onaccept, ondelete=self.pr_affiliation_ondelete) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( pr_pe_types=pe_types, pr_pe_label=pr_pe_label, pr_role_types=role_types, pr_role_id=role_id, ) # ------------------------------------------------------------------------- @staticmethod def pr_role_represent(role_id): """ Represent an entity role @param role_id: the pr_role record ID """ db = current.db s3db = current.s3db table = s3db.pr_role role = db(table.id == role_id).select(table.role, table.pe_id, limitby=(0, 1)).first() if role: entity = pr_pentity_represent(role.pe_id) return "%s: %s" % (entity, role.role) else: return current.messages.NONE # ------------------------------------------------------------------------- @staticmethod def pr_role_onvalidation(form): """ Clear descendant paths if role type has changed @param form: the CRUD form """ db = current.db s3db = current.s3db formvars = form.vars if not formvars: return if "role_type" in formvars: role_id = form.record_id if not role_id: return role_type = formvars.role_type rtable = s3db.pr_role role = db(rtable.id == role_id).select(rtable.role_type, limitby=(0, 1)).first() if role and str(role.role_type) != str(role_type): # If role type has changed, then clear paths if str(role_type) != str(OU): formvars["path"] = None s3db.pr_role_rebuild_path(role_id, clear=True) return # ------------------------------------------------------------------------- @staticmethod def pr_pentity_onaccept(form): """ Update organisation affiliations for org_site instances. 
""" db = current.db s3db = current.s3db ptable = s3db.pr_pentity pe_id = form.vars.pe_id pe = db(ptable.pe_id == pe_id).select(ptable.instance_type, limitby=(0, 1)).first() if pe: itable = s3db.table(pe.instance_type, None) if itable and \ "site_id" in itable.fields and \ "organisation_id" in itable.fields: q = itable.pe_id == pe_id instance = db(q).select(itable.pe_id, itable.organisation_id, limitby=(0, 1)).first() if instance: s3db.pr_update_affiliations("org_site", instance) return # ------------------------------------------------------------------------- @staticmethod def pr_affiliation_onaccept(form): """ Remove duplicate affiliations and clear descendant paths (to trigger lazy rebuild) @param form: the CRUD form """ db = current.db s3db = current.s3db manager = current.manager atable = s3db.pr_affiliation formvars = form.vars role_id = formvars["role_id"] pe_id = formvars["pe_id"] record_id = formvars["id"] if role_id and pe_id and record_id: # Remove duplicates query = (atable.id != record_id) & \ (atable.role_id == role_id) & \ (atable.pe_id == pe_id) deleted_fk = {"role_id": role_id, "pe_id": pe_id} data = {"deleted": True, "role_id": None, "pe_id": None, "deleted_fk": json.dumps(deleted_fk)} db(query).update(**data) # Clear descendant paths s3db.pr_rebuild_path(pe_id, clear=True) return # ------------------------------------------------------------------------- @staticmethod def pr_affiliation_ondelete(row): """ Clear descendant paths, also called indirectly via ondelete-CASCADE when a role gets deleted. @param row: the deleted row """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation if row and row.id: query = atable.id == row.id record = db(query).select(atable.deleted_fk, limitby=(0, 1)).first() else: return if record: data = json.loads(record.deleted_fk) pe_id = data.get("pe_id", None) if pe_id: s3db.pr_rebuild_path(pe_id, clear=True) return # ============================================================================= class S3OrgAuthModel(S3Model): """ Organisation-based Authorization Model """ names = ["pr_restriction", "pr_delegation"] def model(self): auth = current.auth s3 = current.response.s3 role_id = current.s3db.pr_role_id define_table = self.define_table super_link = self.super_link meta_fields = s3.meta_fields # --------------------------------------------------------------------- # Restriction: Person Entity <-> Auth Membership Link # This restricts the permissions assigned by an auth-group membership # to the records owned by this person entity. # mtable = auth.settings.table_membership tablename = "pr_restriction" table = define_table(tablename, super_link("pe_id", "pr_pentity"), Field("membership_id", mtable, ondelete="CASCADE"), *meta_fields()) # --------------------------------------------------------------------- # Delegation: Role <-> Auth Group Link # This "delegates" the permissions of a user group for the records # owned by a person entity to a group of affiliated entities. 
# gtable = auth.settings.table_group tablename = "pr_delegation" table = define_table(tablename, role_id(), Field("group_id", gtable, ondelete="CASCADE"), *meta_fields()) # --------------------------------------------------------------------- return Storage() # ============================================================================= class S3PersonModel(S3Model): """ Persons and Groups """ names = ["pr_person", "pr_person_user", "pr_gender", "pr_gender_opts", "pr_age_group", "pr_age_group_opts", "pr_person_id", ] def model(self): T = current.T db = current.db request = current.request s3 = current.response.s3 gis = current.gis settings = current.deployment_settings pe_label = self.pr_pe_label location_id = self.gis_location_id messages = current.messages UNKNOWN_OPT = messages.UNKNOWN_OPT SELECT_LOCATION = messages.SELECT_LOCATION define_table = self.define_table super_link = self.super_link add_component = self.add_component # --------------------------------------------------------------------- # Person # pr_gender_opts = { 1:"", 2:T("female"), 3:T("male") } pr_gender = S3ReusableField("gender", "integer", requires = IS_IN_SET(pr_gender_opts, zero=None), default = 1, label = T("Gender"), represent = lambda opt: \ pr_gender_opts.get(opt, UNKNOWN_OPT)) pr_age_group_opts = { 1:T("unknown"), 2:T("Infant (0-1)"), 3:T("Child (2-11)"), 4:T("Adolescent (12-20)"), 5:T("Adult (21-50)"), 6:T("Senior (50+)") } pr_age_group = S3ReusableField("age_group", "integer", requires = IS_IN_SET(pr_age_group_opts, zero=None), default = 1, label = T("Age Group"), represent = lambda opt: \ pr_age_group_opts.get(opt, UNKNOWN_OPT)) pr_marital_status_opts = { 1:T("unknown"), 2:T("single"), 3:T("married"), 4:T("separated"), 5:T("divorced"), 6:T("widowed"), 9:T("other") } pr_religion_opts = settings.get_L10n_religions() pr_impact_tags = { 1: T("injured"), 4: T("diseased"), 2: T("displaced"), 5: T("separated from family"), 3: T("suffered financial losses") } if settings.get_L10n_mandatory_lastname(): last_name_validate = IS_NOT_EMPTY(error_message = T("Please enter a last name")) else: last_name_validate = None s3_date_format = settings.get_L10n_date_format() tablename = "pr_person" table = define_table(tablename, super_link("pe_id", "pr_pentity"), super_link("track_id", "sit_trackable"), location_id(readable=False, writable=False), # base location pe_label(comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("ID Tag Number"), T("Number or Label on the identification tag this person is wearing (if any)."))))), Field("missing", "boolean", readable=False, writable=False, default=False, represent = lambda missing: \ (missing and ["missing"] or [""])[0]), Field("volunteer", "boolean", readable=False, writable=False, default=False), Field("first_name", notnull=True, default = "?" 
if current.auth.permission.format != "html" else "", length=64, # Mayon Compatibility # NB Not possible to have an IS_NAME() validator here # http://eden.sahanafoundation.org/ticket/834 requires = IS_NOT_EMPTY(error_message = T("Please enter a first name")), comment = DIV(_class="tooltip", _title="%s|%s" % (T("First name"), T("The first or only name of the person (mandatory)."))), label = T("First Name")), Field("middle_name", length=64, # Mayon Compatibility label = T("Middle Name")), Field("last_name", length=64, # Mayon Compatibility label = T("Last Name"), requires = last_name_validate), Field("initials", length=8, label = T("Initials")), Field("preferred_name", label = T("Preferred Name"), comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Preferred Name"), T("The name to be used when calling for or directly addressing the person (optional).")))), length=64), # Mayon Compatibility Field("local_name", label = T("Local Name"), comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Local Name"), T("Name of the person in local language and script (optional)."))))), pr_gender(label = T("Gender")), Field("date_of_birth", "date", label = T("Date of Birth"), requires = [IS_EMPTY_OR(IS_DATE_IN_RANGE( format = s3_date_format, maximum=request.utcnow.date(), error_message="%s %%(max)s!" % T("Enter a valid date before")))], widget = S3DateWidget(past=1320, # Months, so 110 years future=0)), pr_age_group(label = T("Age group")), Field("nationality", requires = IS_NULL_OR(IS_IN_SET_LAZY( lambda: gis.get_countries(key_type="code"), zero = SELECT_LOCATION)), label = T("Nationality"), comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Nationality"), T("Nationality of the person.")))), represent = lambda code: \ gis.get_country(code, key_type="code") or UNKNOWN_OPT), Field("occupation", label = T("Profession"), length=128), # Mayon Compatibility # Field("picture", "upload", # autodelete=True, # label = T("Picture"), # requires = IS_EMPTY_OR(IS_IMAGE(maxsize=(800, 800), # error_message=T("Upload an image file (bmp, gif, jpeg or png), max. 
800x800 pixels!"))), # represent = lambda image: image and \ # DIV(A(IMG(_src=URL(c="default", f="download", # args=image), # _height=60, # _alt=T("View Picture")), # _href=URL(c="default", f="download", # args=image))) or # T("No Picture"), # comment = DIV(_class="tooltip", # _title="%s|%s" % (T("Picture"), # T("Upload an image file here.")))), Field("opt_in", "string", # list of mailing lists which link to teams default=False, label = T("Receive updates"), comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Mailing list"), T("By selecting this you agree that we may contact you.")))), ), s3.comments(), *(s3.lx_fields() + s3.meta_fields())) # CRUD Strings ADD_PERSON = current.messages.ADD_PERSON LIST_PERSONS = T("List Persons") s3.crud_strings[tablename] = Storage( title_create = T("Add a Person"), title_display = T("Person Details"), title_list = LIST_PERSONS, title_update = T("Edit Person Details"), title_search = T("Search Persons"), subtitle_create = ADD_PERSON, subtitle_list = T("Persons"), label_list_button = LIST_PERSONS, label_create_button = ADD_PERSON, label_delete_button = T("Delete Person"), msg_record_created = T("Person added"), msg_record_modified = T("Person details updated"), msg_record_deleted = T("Person deleted"), msg_list_empty = T("No Persons currently registered")) # add an opt in clause to receive emails depending on the deployment settings if current.deployment_settings.get_auth_opt_in_to_email(): table.opt_in.readable = True table.opt_in.writable = True else: table.opt_in.readable = False table.opt_in.writable = False # Search method pr_person_search = S3PersonSearch( name="person_search_simple", label=T("Name and/or ID"), comment=T("To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. You may use % as wildcard. 
Press 'Search' without input to list all persons."), field=["pe_label", "first_name", "middle_name", "last_name", "local_name", "identity.value" ]) # Resource configuration self.configure(tablename, super_entity=("pr_pentity", "sit_trackable"), list_fields = ["id", "first_name", "middle_name", "last_name", #"picture", "gender", "age_group", (T("Organization"), "hrm_human_resource:organisation_id$name") ], onvalidation=self.pr_person_onvalidation, search_method=pr_person_search, deduplicate=self.person_deduplicate, main="first_name", extra="last_name") person_id_comment = pr_person_comment( T("Person"), T("Type the first few characters of one of the Person's names."), child="person_id") person_id = S3ReusableField("person_id", db.pr_person, sortby = ["first_name", "middle_name", "last_name"], requires = IS_NULL_OR(IS_ONE_OF(db, "pr_person.id", pr_person_represent, orderby="pr_person.first_name", sort=True, error_message=T("Person must be specified!"))), represent = pr_person_represent, label = T("Person"), comment = person_id_comment, ondelete = "RESTRICT", widget = S3PersonAutocompleteWidget()) # Components add_component("pr_group_membership", pr_person="person_id") add_component("pr_identity", pr_person="person_id") add_component("pr_save_search", pr_person="person_id") add_component("msg_subscription", pr_person="person_id") # HR Record as component of Persons add_component("hrm_human_resource", pr_person="person_id") add_component("member_membership", pr_person="person_id") # Skills as components of Persons add_component("hrm_certification", pr_person="person_id") add_component("hrm_competency", pr_person="person_id") add_component("hrm_credential", pr_person="person_id") add_component("hrm_experience", pr_person="person_id") # @ToDo: Double link table to show the Courses attended? add_component("hrm_training", pr_person="person_id") # Assets as component of persons add_component("asset_asset", pr_person="assigned_to_id") # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( pr_gender = pr_gender, pr_gender_opts = pr_gender_opts, pr_age_group = pr_age_group, pr_age_group_opts = pr_age_group_opts, pr_person_id = person_id, ) # ------------------------------------------------------------------------- @staticmethod def pr_person_onvalidation(form): """ Onvalidation callback """ try: age = int(form.vars.get("age_group", None)) except (ValueError, TypeError): age = None dob = form.vars.get("date_of_birth", None) if age and age != 1 and dob: now = request.utcnow dy = int((now.date() - dob).days / 365.25) if dy < 0: ag = 1 elif dy < 2: ag = 2 elif dy < 12: ag = 3 elif dy < 21: ag = 4 elif dy < 51: ag = 5 else: ag = 6 if age != ag: form.errors.age_group = T("Age group does not match actual age.") return False return True # ------------------------------------------------------------------------- @staticmethod def person_deduplicate(item): """ Import item deduplication """ db = current.db s3db = current.s3db # Ignore this processing if the id is set if item.id: return if item.tablename == "pr_person": ptable = s3db.pr_person ctable = s3db.pr_contact # Match by first name and last name, and if given, by email address fname = "first_name" in item.data and item.data.first_name lname = "last_name" in item.data and item.data.last_name if fname and lname: # "LIKE" is inappropriate here: # E.g. 
"Fran Boon" would overwrite "Frank Boones" #query = (ptable.first_name.lower().like('%%%s%%' % fname.lower())) & \ #(ptable.last_name.lower().like('%%%s%%' % lname.lower())) # But even an exact name match does not necessarily indicate a # duplicate: depending on the scope of the deployment, you could # have thousands of people with exactly the same names (or just # two of them - and it can already go wrong). # We take the email address as additional criterion, however, where # person data do not usually contain email addresses you might need # to add more/other criteria here query = (ptable.first_name.lower() == fname.lower()) & \ (ptable.last_name.lower() == lname.lower()) email = False for citem in item.components: if citem.tablename == "pr_contact": if "contact_method" in citem.data and \ citem.data.contact_method == "EMAIL": email = citem.data.value if email != False: query = query & \ (ptable.pe_id == ctable.pe_id) & \ (ctable.value.lower() == email.lower()) else: # Try Initials (this is a weak test but works well in small teams) initials = "initials" in item.data and item.data.initials if not initials: # Nothing we can do return query = (ptable.initials.lower() == initials.lower()) # Look for details on the database _duplicate = db(query).select(ptable.id, limitby=(0, 1)).first() if _duplicate: item.id = _duplicate.id item.data.id = _duplicate.id item.method = item.METHOD.UPDATE for citem in item.components: citem.method = citem.METHOD.UPDATE # ============================================================================= class S3GroupModel(S3Model): """ Groups """ names = ["pr_group", "pr_group_id", "pr_group_represent", "pr_group_membership"] def model(self): T = current.T db = current.db request = current.request s3 = current.response.s3 person_id = self.pr_person_id messages = current.messages NONE = messages.NONE UNKNOWN_OPT = messages.UNKNOWN_OPT comments = s3.comments configure = self.configure crud_strings = s3.crud_strings define_table = self.define_table meta_fields = s3.meta_fields # --------------------------------------------------------------------- # Group # pr_group_types = { 1:T("Family"), 2:T("Tourist Group"), 3:T("Relief Team"), 4:T("other"), 5:T("Mailing Lists"), } tablename = "pr_group" table = define_table(tablename, self.super_link("pe_id", "pr_pentity"), Field("group_type", "integer", requires = IS_IN_SET(pr_group_types, zero=None), default = 4, label = T("Group Type"), represent = lambda opt: \ pr_group_types.get(opt, UNKNOWN_OPT)), Field("system", "boolean", default=False, readable=False, writable=False), Field("name", label=T("Group Name"), requires = IS_NOT_EMPTY()), Field("description", label=T("Group Description")), comments(), *meta_fields()) # Field configuration table.description.comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Group description"), T("A brief description of the group (optional)")))) # CRUD Strings ADD_GROUP = T("Add Group") LIST_GROUPS = T("List Groups") crud_strings[tablename] = Storage( title_create = ADD_GROUP, title_display = T("Group Details"), title_list = LIST_GROUPS, title_update = T("Edit Group"), title_search = T("Search Groups"), subtitle_create = T("Add New Group"), subtitle_list = T("Groups"), label_list_button = LIST_GROUPS, label_create_button = ADD_GROUP, label_delete_button = T("Delete Group"), msg_record_created = T("Group added"), msg_record_modified = T("Group updated"), msg_record_deleted = T("Group deleted"), msg_list_empty = T("No Groups currently registered")) # CRUD Strings ADD_GROUP = T("Add Mailing 
List") LIST_GROUPS = T("Mailing List") mailing_list_crud_strings = Storage( title_create = ADD_GROUP, title_display = T("Mailing List Details"), title_list = LIST_GROUPS, title_update = T("Edit Mailing List"), title_search = T("Search Mailing Lists"), subtitle_create = T("Add New Mailing List"), subtitle_list = T("Mailing Lists"), label_list_button = LIST_GROUPS, label_create_button = ADD_GROUP, label_delete_button = T("Delete Mailing List"), msg_record_created = T("Mailing list added"), msg_record_modified = T("Mailing list updated"), msg_record_deleted = T("Mailing list deleted"), msg_list_empty = T("No Mailing List currently established")) # Resource configuration configure(tablename, super_entity="pr_pentity", deduplicate=self.group_deduplicate, main="name", extra="description") # Reusable fields group_represent = lambda id: (id and [db.pr_group[id].name] or [NONE])[0] group_id = S3ReusableField("group_id", db.pr_group, sortby="name", requires = IS_NULL_OR(IS_ONE_OF(db, "pr_group.id", "%(id)s: %(name)s", filterby="system", filter_opts=(False,))), represent = group_represent, comment = \ DIV(A(s3.crud_strings.pr_group.label_create_button, _class="colorbox", _href=URL(c="pr", f="group", args="create", vars=dict(format="popup")), _target="top", _title=s3.crud_strings.pr_group.label_create_button), DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Create Group Entry"), T("Create a group entry in the registry."))))), ondelete = "RESTRICT") # Components self.add_component("pr_group_membership", pr_group="group_id") # --------------------------------------------------------------------- # Group membership # resourcename = "group_membership" tablename = "pr_group_membership" table = define_table(tablename, group_id(label = T("Group"), ondelete="CASCADE"), person_id(label = T("Person"), ondelete="CASCADE"), Field("group_head", "boolean", label = T("Group Head"), default=False), Field("description", label = T("Description")), comments(), *meta_fields()) # Field configuration table.group_head.represent = lambda group_head: \ (group_head and [T("yes")] or [""])[0] # CRUD strings request = current.request if request.function in ("person", "group_membership"): crud_strings[tablename] = Storage( title_create = T("Add Membership"), title_display = T("Membership Details"), title_list = T("Memberships"), title_update = T("Edit Membership"), title_search = T("Search Membership"), subtitle_create = T("Add New Membership"), subtitle_list = T("Current Memberships"), label_list_button = T("List All Memberships"), label_create_button = T("Add Membership"), label_delete_button = T("Delete Membership"), msg_record_created = T("Membership added"), msg_record_modified = T("Membership updated"), msg_record_deleted = T("Membership deleted"), msg_list_empty = T("No Memberships currently registered")) elif request.function == "group": crud_strings[tablename] = Storage( title_create = T("Add Member"), title_display = T("Membership Details"), title_list = T("Group Members"), title_update = T("Edit Membership"), title_search = T("Search Member"), subtitle_create = T("Add New Member"), subtitle_list = T("Current Group Members"), label_list_button = T("List Members"), label_create_button = T("Add Group Member"), label_delete_button = T("Delete Membership"), msg_record_created = T("Group Member added"), msg_record_modified = T("Membership updated"), msg_record_deleted = T("Membership deleted"), msg_list_empty = T("No Members currently registered")) # Resource configuration configure(tablename, onaccept = 
self.group_membership_onaccept, ondelete = self.group_membership_onaccept, list_fields=["id", "group_id", "person_id", "group_head", "description" ]) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( pr_group_id = group_id, pr_group_represent = group_represent, pr_mailing_list_crud_strings = mailing_list_crud_strings ) # ------------------------------------------------------------------------- @staticmethod def group_deduplicate(item): """ Group de-duplication """ if item.id: return if item.tablename == "pr_group": table = item.table name = item.data.get("name", None) query = (table.name == name) & \ (table.deleted != True) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ------------------------------------------------------------------------- @staticmethod def group_membership_onaccept(form): """ Remove any duplicate memberships and update affiliations """ db = current.db s3db = current.s3db mtable = s3db.pr_group_membership if hasattr(form, "vars"): _id = form.vars.id elif isinstance(form, Row) and "id" in form: _id = form.id else: return if _id: record = db(mtable.id == _id).select(limitby=(0, 1)).first() else: return if record: person_id = record.person_id group_id = record.group_id if person_id and group_id and not record.deleted: query = (mtable.person_id == person_id) & \ (mtable.group_id == group_id) & \ (mtable.id != record.id) & \ (mtable.deleted != True) deleted_fk = {"person_id": person_id, "group_id": group_id} db(query).update(deleted = True, person_id = None, group_id = None, deleted_fk = json.dumps(deleted_fk)) pr_update_affiliations(mtable, record) return # ============================================================================= class S3ContactModel(S3Model): """ Person Contacts """ names = ["pr_contact", "pr_contact_emergency" ] def model(self): T = current.T db = current.db msg = current.msg request = current.request s3 = current.response.s3 UNKNOWN_OPT = current.messages.UNKNOWN_OPT comments = s3.comments define_table = self.define_table meta_fields = s3.meta_fields super_link = self.super_link # --------------------------------------------------------------------- # Contact # # @ToDo: Provide widgets which can be dropped into the main person form to have # the relevant ones for that deployment/context collected on that same # form # contact_methods = msg.CONTACT_OPTS tablename = "pr_contact" table = define_table(tablename, super_link("pe_id", "pr_pentity"), Field("contact_method", length=32, requires = IS_IN_SET(contact_methods, zero=None), default = "SMS", label = T("Contact Method"), represent = lambda opt: \ contact_methods.get(opt, UNKNOWN_OPT)), Field("value", label= T("Value"), notnull=True, requires = IS_NOT_EMPTY()), Field("priority", "integer", label= T("Priority"), comment = DIV(_class="tooltip", _title="%s|%s" % (T("Priority"), T("What order to be contacted in."))), requires = IS_IN_SET(range(1, 10), zero=None)), comments(), *meta_fields()) # Field configuration table.pe_id.requires = IS_ONE_OF(db, "pr_pentity.pe_id", pr_pentity_represent, orderby="instance_type", filterby="instance_type", filter_opts=("pr_person", "pr_group")) # CRUD Strings s3.crud_strings[tablename] = Storage( title_create = T("Add Contact Information"), title_display = T("Contact Details"), title_list = T("Contact Information"), title_update = T("Edit Contact Information"), title_search = T("Search 
Contact Information"), subtitle_create = T("Add Contact Information"), subtitle_list = T("Contact Information"), label_list_button = T("List Contact Information"), label_create_button = T("Add Contact Information"), label_delete_button = T("Delete Contact Information"), msg_record_created = T("Contact Information Added"), msg_record_modified = T("Contact Information Updated"), msg_record_deleted = T("Contact Information Deleted"), msg_list_empty = T("No contact information available")) # Resource configuration self.configure(tablename, onvalidation=self.contact_onvalidation, deduplicate=self.contact_deduplicate, list_fields=["id", "contact_method", "value", "priority", ]) # --------------------------------------------------------------------- # Emergency Contact Information # tablename = "pr_contact_emergency" table = define_table(tablename, super_link("pe_id", "pr_pentity"), Field("name", label= T("Name")), Field("relationship", label= T("Relationship")), Field("phone", label = T("Phone"), requires = IS_NULL_OR(s3_phone_requires)), comments(), *meta_fields()) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( ) # ------------------------------------------------------------------------- @staticmethod def contact_onvalidation(form): """ Contact form validation """ if form.vars.contact_method == "EMAIL": email, error = IS_EMAIL()(form.vars.value) if error: form.errors.value = T("Enter a valid email") return False # ------------------------------------------------------------------------- @staticmethod def contact_deduplicate(item): """ Contact information de-duplication """ if item.id: return if item.tablename == "pr_contact": table = item.table pe_id = item.data.get("pe_id", None) contact_method = item.data.get("contact_method", None) value = item.data.get("value", None) if pe_id is None: return query = (table.pe_id == pe_id) & \ (table.contact_method == contact_method) & \ (table.value == value) & \ (table.deleted != True) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3PersonAddressModel(S3Model): """ Addresses for Persons """ names = ["pr_address", "pr_address_type_opts" ] def model(self): T = current.T db = current.db request = current.request s3 = current.response.s3 location_id = self.gis_location_id UNKNOWN_OPT = current.messages.UNKNOWN_OPT # --------------------------------------------------------------------- # Address # pr_address_type_opts = { 1:T("Home Address"), 2:T("Office Address"), 3:T("Holiday Address"), 9:T("other") } tablename = "pr_address" table = self.define_table(tablename, self.super_link("pe_id", "pr_pentity"), Field("type", "integer", requires = IS_IN_SET(pr_address_type_opts, zero=None), widget = RadioWidget.widget, default = 1, label = T("Address Type"), represent = lambda opt: \ pr_address_type_opts.get(opt, UNKNOWN_OPT)), location_id(), s3.comments(), *(s3.address_fields() + s3.meta_fields())) table.pe_id.requires = IS_ONE_OF(db, "pr_pentity.pe_id", pr_pentity_represent, orderby="instance_type", filterby="instance_type", filter_opts=("pr_person", "pr_group")) # Field configuration if not self.settings.get_gis_building_name(): table.building_name.readable = False # CRUD Strings ADD_ADDRESS = T("Add Address") LIST_ADDRESS = T("List of addresses") s3.crud_strings[tablename] = Storage( title_create 
= ADD_ADDRESS, title_display = T("Address Details"), title_list = LIST_ADDRESS, title_update = T("Edit Address"), title_search = T("Search Addresses"), subtitle_create = T("Add New Address"), subtitle_list = T("Addresses"), label_list_button = LIST_ADDRESS, label_create_button = ADD_ADDRESS, msg_record_created = T("Address added"), msg_record_modified = T("Address updated"), msg_record_deleted = T("Address deleted"), msg_list_empty = T("There is no address for this person yet. Add new address.")) # Resource configuration self.configure(tablename, onaccept=self.address_onaccept, onvalidation=s3.address_onvalidation, deduplicate=self.address_deduplicate, list_fields = ["id", "type", "address", "postcode", #"L4", "L3", "L2", "L1", "L0" ]) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( pr_address_type_opts = pr_address_type_opts ) # ------------------------------------------------------------------------- @staticmethod def address_onaccept(form): """ Updates the Base Location to be the same as the Address If the base location hasn't yet been set or if this is specifically requested """ db = current.db s3db = current.s3db request = current.request lx_update = current.response.s3.lx_update vars = form.vars location_id = vars.location_id pe_id = vars.pe_id if location_id: person = None table = s3db.pr_person if "base_location" in request.vars and \ request.vars.base_location == "on": # Specifically requested S3Tracker()(s3db.pr_pentity, pe_id).set_base_location(location_id) person = db(table.pe_id == pe_id).select(table.id, limitby=(0, 1)).first() if person: # Update the Lx fields lx_update(table, person.id) else: # Check if a base location already exists query = (table.pe_id == pe_id) person = db(query).select(table.id, table.location_id).first() if person and not person.location_id: # Hasn't yet been set so use this S3Tracker()(s3db.pr_pentity, pe_id).set_base_location(location_id) # Update the Lx fields lx_update(table, person.id) if person and str(vars.type) == "1": # Home Address # Also check for any Volunteer HRM record(s) htable = s3db.hrm_human_resource query = (htable.person_id == person.id) & \ (htable.type == 2) & \ (htable.deleted != True) hrs = db(query).select(htable.id) for hr in hrs: db(htable.id == hr.id).update(location_id=location_id) # Update the Lx fields lx_update(htable, hr.id) return # ------------------------------------------------------------------------- @staticmethod def address_deduplicate(item): """ Address de-duplication """ if item.id: return if item.tablename == "pr_address": table = item.table pe_id = item.data.get("pe_id", None) address = item.data.get("address", None) if pe_id is None: return query = (table.pe_id == pe_id) & \ (table.address == address) & \ (table.deleted != True) duplicate = current.db(query).select(table.id, limitby=(0, 1)).first() if duplicate: item.id = duplicate.id item.method = item.METHOD.UPDATE return # ============================================================================= class S3PersonImageModel(S3Model): """ Images for Persons """ names = ["pr_image"] def model(self): T = current.T db = current.db request = current.request s3 = current.response.s3 UNKNOWN_OPT = current.messages.UNKNOWN_OPT # --------------------------------------------------------------------- # Image # pr_image_type_opts = { 1:T("Photograph"), 2:T("Sketch"), 3:T("Fingerprint"), 4:T("X-Ray"), 5:T("Document Scan"), 9:T("other") } tablename = "pr_image" table = 
self.define_table(tablename, self.super_link("pe_id", "pr_pentity"), Field("profile", "boolean", default = False, label = T("Profile Picture?") ), Field("type", "integer", requires = IS_IN_SET(pr_image_type_opts, zero=None), default = 1, label = T("Image Type"), represent = lambda opt: pr_image_type_opts.get(opt, UNKNOWN_OPT)), Field("title", label=T("Title"), requires = IS_NOT_EMPTY(), comment = DIV(_class="tooltip", _title="%s|%s" % (T("Title"), T("Specify a descriptive title for the image.")))), Field("image", "upload", autodelete=True, represent = lambda image: image and \ DIV(A(IMG(_src=URL(c="default", f="download", args=image), _height=60, _alt=T("View Image")), _href=URL(c="default", f="download", args=image))) or T("No Image"), comment = DIV(_class="tooltip", _title="%s|%s" % (T("Image"), T("Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.")))), Field("url", label = T("URL"), represent = lambda url: url and \ DIV(A(IMG(_src=url, _height=60), _href=url)) or T("None"), comment = DIV(_class="tooltip", _title="%s|%s" % (T("URL"), T("The URL of the image file. If you don't upload an image file, then you must specify its location here.")))), Field("description", label=T("Description"), comment = DIV(_class="tooltip", _title="%s|%s" % (T("Description"), T("Give a brief description of the image, e.g. what can be seen where on the picture (optional).")))), s3.comments(), *s3.meta_fields()) # CRUD Strings LIST_IMAGES = T("List Images") s3.crud_strings[tablename] = Storage( title_create = T("Image"), title_display = T("Image Details"), title_list = LIST_IMAGES, title_update = T("Edit Image Details"), title_search = T("Search Images"), subtitle_create = T("Add New Image"), subtitle_list = T("Images"), label_list_button = LIST_IMAGES, label_create_button = T("Add Image"), label_delete_button = T("Delete Image"), msg_record_created = T("Image added"), msg_record_modified = T("Image updated"), msg_record_deleted = T("Image deleted"), msg_list_empty = T("No Images currently registered")) # Resource configuration self.configure(tablename, onaccept = self.pr_image_onaccept, onvalidation = self.pr_image_onvalidation, mark_required = ["url", "image"], list_fields=["id", "title", "profile", "type", "image", "url", "description" ]) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage() # ------------------------------------------------------------------------- @staticmethod def pr_image_onaccept(form): """ If this is the profile image then remove this flag from all others for this person. 
""" db = current.db s3db = current.s3db table = s3db.pr_image vars = form.vars id = vars.id profile = vars.profile url = vars.url newfilename = vars.image_newfilename if profile == 'False': profile = False if newfilename and not url: # Provide the link to the file in the URL field url = URL(c="default", f="download", args=newfilename) query = (table.id == id) db(query).update(url = url) if profile: # Find the pe_id query = (table.id == id) pe = db(query).select(table.pe_id, limitby=(0, 1)).first() if pe: pe_id = pe.pe_id # Set all others for this person as not the Profile picture query = (table.pe_id == pe_id) & \ (table.id != id) db(query).update(profile = False) # ------------------------------------------------------------------------- @staticmethod def pr_image_onvalidation(form): """ Image form validation """ db = current.db s3db = current.s3db request = current.request vars = form.vars table = s3db.pr_image image = vars.image url = vars.url if not hasattr(image, "file"): id = request.post_vars.id if id: record = db(table.id == id).select(table.image, limitby=(0, 1)).first() if record: image = record.image if not hasattr(image, "file") and not image and not url: form.errors.image = \ form.errors.url = T("Either file upload or image URL required.") return # ============================================================================= class S3PersonIdentityModel(S3Model): """ Identities for Persons """ names = ["pr_identity"] def model(self): T = current.T db = current.db request = current.request s3 = current.response.s3 person_id = self.pr_person_id UNKNOWN_OPT = current.messages.UNKNOWN_OPT # --------------------------------------------------------------------- # Identity # # http://docs.oasis-open.org/emergency/edxl-have/cs01/xPIL-types.xsd # <xs:simpleType name="DocumentTypeList"> # <xs:enumeration value="Passport"/> # <xs:enumeration value="DriverLicense"/> # <xs:enumeration value="CreditCard"/> # <xs:enumeration value="BankCard"/> # <xs:enumeration value="KeyCard"/> # <xs:enumeration value="AccessCard"/> # <xs:enumeration value="IdentificationCard"/> # <xs:enumeration value="Certificate"/> # <xs:enumeration value="MileageProgram"/> # pr_id_type_opts = { 1:T("Passport"), 2:T("National ID Card"), 3:T("Driving License"), #4:T("Credit Card"), 99:T("other") } tablename = "pr_identity" table = self.define_table(tablename, person_id(label = T("Person"), ondelete="CASCADE"), Field("type", "integer", requires = IS_IN_SET(pr_id_type_opts, zero=None), default = 1, label = T("ID type"), represent = lambda opt: \ pr_id_type_opts.get(opt, UNKNOWN_OPT)), Field("value"), Field("description"), Field("country_code", length=4), Field("ia_name", label = T("Issuing Authority")), #Field("ia_subdivision"), # Name of issuing authority subdivision #Field("ia_code"), # Code of issuing authority (if any) s3.comments(), *s3.meta_fields()) # Field configuration table.value.requires = [IS_NOT_EMPTY(), IS_NOT_ONE_OF(db, "%s.value" % tablename)] # CRUD Strings ADD_IDENTITY = T("Add Identity") s3.crud_strings[tablename] = Storage( title_create = ADD_IDENTITY, title_display = T("Identity Details"), title_list = T("Known Identities"), title_update = T("Edit Identity"), title_search = T("Search Identity"), subtitle_create = T("Add New Identity"), subtitle_list = T("Current Identities"), label_list_button = T("List Identities"), label_create_button = ADD_IDENTITY, msg_record_created = T("Identity added"), msg_record_modified = T("Identity updated"), msg_record_deleted = T("Identity deleted"), msg_list_empty = T("No 
Identities currently registered")) # Resource configuration self.configure(tablename, list_fields=["id", "type", "value", "country_code", "ia_name" ]) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage() # ============================================================================= class S3SavedSearch(S3Model): """ Saved Searches """ names = ["pr_save_search"] def model(self): T = current.T db = current.db auth = current.auth request = current.request s3 = current.response.s3 pr_person = self.table("pr_person") person_id = s3.pr_person_id # --------------------------------------------------------------------- # Saved Searches # tablename = "pr_save_search" table = self.define_table(tablename, Field("user_id", "integer", readable = False, writable = False, default = auth.user_id), Field("search_vars","text", label = T("Search Criteria"), represent=lambda id:self.search_vars_represent(id)), Field("subscribed","boolean", default=False), person_id(label = T("Person"), ondelete="CASCADE", default = auth.s3_logged_in_person()), *s3.meta_fields()) # CRUD Strings s3.crud_strings[tablename] = Storage( title_create = T("Save Search"), title_display = T("Saved Search Details"), title_list = T("Saved Searches"), title_update = T("Edit Saved Search"), title_search = T("Search Saved Searches"), subtitle_create = T("Add Saved Search"), subtitle_list = T("Saved Searches"), label_list_button = T("List Saved Searches"), label_create_button = T("Save Search"), label_delete_button = T("Delete Saved Search"), msg_record_created = T("Saved Search added"), msg_record_modified = T("Saved Search updated"), msg_record_deleted = T("Saved Search deleted"), msg_list_empty = T("No Search saved")) # Resource configuration self.configure(tablename, insertable = False, editable = False, listadd = False, deletable = True, list_fields=["search_vars"]) # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage() # ------------------------------------------------------------------------- @staticmethod def search_vars_represent(search_vars): """ Represent the search criteria @param search_vars: the pickled search criteria as stored in pr_save_search.search_vars @ToDo: Modify this function so that it displays a Human Readable representation of the criteria Move this function to modules/s3/s3search Use this function in controllers/msg instead of re-defining it there """ import cPickle import re s = "" search_vars = search_vars.replace("&apos;", "'") try: search_vars = cPickle.loads(str(search_vars)) except: raise HTTP(500, "ERROR RETRIEVING THE SEARCH CRITERIA") s = "<p>" pat = '_' for var in search_vars.iterkeys(): if var == "criteria": c_dict = search_vars[var] #s = s + crud_string("pr_save_search", "Search Criteria") for j in c_dict.iterkeys(): if not re.match(pat,j): st = str(j) st = st.replace("_search_", " ") st = st.replace("_advanced", "") st = st.replace("_simple", "") st = st.replace("text", "text matching") #st = st.replace(search_vars["function"], "") #st = st.replace(search_vars["prefix"], "") st = st.replace("_", " ") s = "%s <b> %s </b>: %s <br />" %(s, st.capitalize(), str(c_dict[j])) elif var == "simple" or var == "advanced": continue else: if var == "function": v1 = "Resource Name" elif var == "prefix": v1 = "Module" else: continue s = "%s<b>%s</b>: %s<br />" %(s, v1, str(search_vars[var])) s = s + "</p>" return XML(s) # =============================================================================
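# -----------------------------------------------------------------------------
# Illustrative sketch (not part of the Eden API): builds the kind of value that
# search_vars_represent() above expects in pr_save_search.search_vars. The
# module/resource/criteria names are hypothetical sample data.
def _example_search_vars():
    import cPickle
    sample = {"prefix": "pr",          # module
              "function": "person",    # resource name
              "criteria": {"person_search_simple": "smith"}}
    # The column stores the pickled dict; search_vars_represent() unpickles it
    # and renders the module, resource name and criteria as HTML
    return cPickle.dumps(sample)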
class S3PersonPresence(S3Model): """ Presence Log for Persons @todo: deprecate currently still used by CR """ names = ["pr_presence", "pr_trackable_types", "pr_default_trackable", "pr_presence_opts", "pr_presence_conditions", "pr_default_presence"] def model(self): T = current.T db = current.db auth = current.auth request = current.request s3 = current.response.s3 person_id = self.pr_person_id location_id = self.gis_location_id ADD_LOCATION = current.messages.ADD_LOCATION UNKNOWN_OPT = current.messages.UNKNOWN_OPT datetime_represent = S3DateTime.datetime_represent # Trackable types pr_trackable_types = { 1:current.T("Person"), # an individual 2:current.T("Group"), # a group 3:current.T("Body"), # a dead body or body part 4:current.T("Object"), # other objects belonging to persons 5:current.T("Organization"), # an organisation 6:current.T("Office"), # an office } pr_default_trackable = 1 # Presence conditions pr_presence_opts = Storage( SEEN = 1, TRANSIT = 2, PROCEDURE = 3, TRANSITIONAL_PRESENCE = (1, 2, 3), CHECK_IN = 11, CONFIRMED = 12, DECEASED = 13, LOST = 14, PERSISTANT_PRESENCE = (11, 12, 13, 14), TRANSFER = 21, CHECK_OUT = 22, ABSENCE = (21, 22), MISSING = 99 ) opts = pr_presence_opts pr_presence_conditions = Storage({ # Transitional presence conditions: opts.SEEN: current.T("Seen"), # seen (formerly "found") at location opts.TRANSIT: current.T("Transit"), # seen at location, between two transfers opts.PROCEDURE: current.T("Procedure"), # seen at location, undergoing procedure ("Checkpoint") # Persistant presence conditions: opts.CHECK_IN: current.T("Check-In"), # arrived at location for accomodation/storage opts.CONFIRMED: current.T("Confirmed"), # confirmation of stay/storage at location opts.DECEASED: current.T("Deceased"), # deceased opts.LOST: current.T("Lost"), # destroyed/disposed at location # Absence conditions: opts.TRANSFER: current.T("Transfer"), # Send to another location opts.CHECK_OUT: current.T("Check-Out"), # Left location for unknown destination # Missing condition: opts.MISSING: current.T("Missing"), # Missing (from a "last-seen"-location) }) pr_default_presence = 1 resourcename = "presence" tablename = "pr_presence" table = self.define_table(tablename, self.super_link("pe_id", "pr_pentity"), self.super_link("sit_id", "sit_situation"), person_id("observer", label=T("Observer"), default = auth.s3_logged_in_person(), comment=pr_person_comment(title=T("Observer"), comment=T("Person who has actually seen the person/group."), child="observer")), Field("shelter_id", "integer", readable = False, writable = False), location_id(widget = S3LocationAutocompleteWidget(), comment = DIV(A(ADD_LOCATION, _class="colorbox", _target="top", _title=ADD_LOCATION, _href=URL(c="gis", f="location", args="create", vars=dict(format="popup"))), DIV(_class="tooltip", _title="%s|%s" % (T("Current Location"), T("The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations."))))), Field("location_details", comment = DIV(_class="tooltip", _title="%s|%s" % (T("Location Details"), T("Specific Area (e.g. 
Building/Room) within the Location that this Person/Group is seen.")))), Field("datetime", "datetime", label = T("Date/Time"), default = request.utcnow, requires = IS_UTC_DATETIME(allow_future=False), widget = S3DateTimeWidget(future=0), represent = lambda val: datetime_represent(val, utc=True)), Field("presence_condition", "integer", requires = IS_IN_SET(pr_presence_conditions, zero=None), default = pr_default_presence, label = T("Presence Condition"), represent = lambda opt: \ pr_presence_conditions.get(opt, UNKNOWN_OPT)), Field("proc_desc", label = T("Procedure"), comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Procedure"), T('Describe the procedure which this record relates to (e.g. "medical examination")'))))), location_id("orig_id", label=T("Origin"), widget = S3LocationAutocompleteWidget(), comment = DIV(A(ADD_LOCATION, _class="colorbox", _target="top", _title=ADD_LOCATION, _href=URL(c="gis", f="location", args="create", vars=dict(format="popup"))), DIV(_class="tooltip", _title="%s|%s" % (T("Origin"), T("The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations."))))), location_id("dest_id", label=T("Destination"), widget = S3LocationAutocompleteWidget(), comment = DIV(A(ADD_LOCATION, _class="colorbox", _target="top", _title=ADD_LOCATION, _href=URL(c="gis", f="location", args="create", vars=dict(format="popup"))), DIV(_class="tooltip", _title="%s|%s" % (T("Destination"), T("The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations."))))), Field("comment"), Field("closed", "boolean", default=False, readable = False, writable = False), *s3.meta_fields()) # CRUD Strings ADD_LOG_ENTRY = T("Add Log Entry") s3.crud_strings[tablename] = Storage( title_create = ADD_LOG_ENTRY, title_display = T("Log Entry Details"), title_list = T("Presence Log"), title_update = T("Edit Log Entry"), title_search = T("Search Log Entry"), subtitle_create = T("Add New Log Entry"), subtitle_list = T("Current Log Entries"), label_list_button = T("List Log Entries"), label_create_button = ADD_LOG_ENTRY, msg_record_created = T("Log entry added"), msg_record_modified = T("Log entry updated"), msg_record_deleted = T("Log entry deleted"), msg_list_empty = T("No Presence Log Entries currently registered")) # Resource configuration self.configure(tablename, super_entity = "sit_situation", onvalidation = self.presence_onvalidation, onaccept = self.presence_onaccept, delete_onaccept = self.presence_onaccept, list_fields = ["id", "datetime", "location_id", "shelter_id", "presence_condition", "orig_id", "dest_id" ], main="time", extra="location_details") # --------------------------------------------------------------------- # Return model-global names to response.s3 # return Storage( pr_trackable_types=pr_trackable_types, pr_default_trackable=pr_default_trackable, pr_presence_opts=pr_presence_opts, pr_presence_conditions=pr_presence_conditions, pr_default_presence=pr_default_presence ) # ------------------------------------------------------------------------- @staticmethod def presence_onvalidation(form): """ Presence record validation """ db = current.db s3db = current.s3db s3 = current.response.s3 table = s3db.pr_presence popts = s3.pr_presence_opts shelter_table = s3db.cr_shelter location = form.vars.location_id shelter = form.vars.shelter_id if shelter and shelter_table is not None: set 
= db(shelter_table.id == shelter) row = set.select(shelter_table.location_id, limitby=(0, 1)).first() if row: location = form.vars.location_id = row.location_id else: shelter = None if location or shelter: return condition = form.vars.presence_condition if condition: try: condition = int(condition) except ValueError: condition = None else: condition = table.presence_condition.default form.vars.presence_condition = condition if condition: if condition in popts.PERSISTANT_PRESENCE or \ condition in popts.ABSENCE: if not form.vars.id: if table.location_id.default or \ table.shelter_id.default: return else: record = db(table.id == form.vars.id).select(table.location_id, table.shelter_id, limitby=(0, 1)).first() if record and \ (record.location_id or record.shelter_id): return else: return else: return form.errors.location_id = \ form.errors.shelter_id = T("Either a shelter or a location must be specified") return # ------------------------------------------------------------------------- @staticmethod def presence_onaccept(form): """ Update the presence log of a person entity - mandatory to be called as onaccept routine at any modification of pr_presence records """ db = current.db s3db = current.s3db s3 = current.response.s3 table = s3db.pr_presence popts = s3.pr_presence_opts if isinstance(form, (int, long, str)): id = form elif hasattr(form, "vars"): id = form.vars.id else: id = form.id presence = db(table.id == id).select(table.ALL, limitby=(0,1)).first() if not presence: return else: condition = presence.presence_condition pe_id = presence.pe_id datetime = presence.datetime if not datetime or not pe_id: return this_entity = ((table.pe_id == pe_id) & (table.deleted == False)) earlier = (table.datetime < datetime) later = (table.datetime > datetime) same_place = ((table.location_id == presence.location_id) | (table.shelter_id == presence.shelter_id)) is_present = (table.presence_condition.belongs(popts.PERSISTANT_PRESENCE)) is_absent = (table.presence_condition.belongs(popts.ABSENCE)) is_missing = (table.presence_condition == popts.MISSING) if not presence.deleted: if condition in popts.TRANSITIONAL_PRESENCE: if presence.closed: db(table.id == id).update(closed=False) elif condition in popts.PERSISTANT_PRESENCE: if not presence.closed: query = this_entity & earlier & (is_present | is_missing) & \ (table.closed == False) db(query).update(closed=True) query = this_entity & later & \ (is_present | (is_absent & same_place)) if db(query).count(): db(table.id == id).update(closed=True) elif condition in popts.ABSENCE: query = this_entity & earlier & is_present & same_place db(query).update(closed=True) if not presence.closed: db(table.id == id).update(closed=True) if not presence.closed: # Re-open the last persistent presence if no closing event query = this_entity & is_present presence = db(query).select(table.ALL, orderby=~table.datetime, limitby=(0,1)).first() if presence and presence.closed: later = (table.datetime > presence.datetime) query = this_entity & later & is_absent & same_place if not db(query).count(): db(table.id == presence.id).update(closed=False) # Re-open the last missing if no later persistent presence query = this_entity & is_missing presence = db(query).select(table.ALL, orderby=~table.datetime, limitby=(0,1)).first() if presence and presence.closed: later = (table.datetime > presence.datetime) query = this_entity & later & is_present if not db(query).count(): db(table.id == presence.id).update(closed=False) pentity = db(db.pr_pentity.pe_id ==
pe_id).select(db.pr_pentity.instance_type, limitby=(0,1)).first() if pentity and pentity.instance_type == "pr_person": query = this_entity & is_missing & (table.closed == False) if db(query).count(): db(db.pr_person.pe_id == pe_id).update(missing = True) else: db(db.pr_person.pe_id == pe_id).update(missing = False) return # ============================================================================= class S3PersonDescription(S3Model): """ Additional tables for DVI/MPR """ names = ["pr_note", "pr_physical_description"] def model(self): T = current.T db = current.db auth = current.auth request = current.request s3 = current.response.s3 person_id = self.pr_person_id location_id = self.gis_location_id UNKNOWN_OPT = current.messages.UNKNOWN_OPT #if deployment_settings.has_module("dvi") or \ #deployment_settings.has_module("mpr"): # --------------------------------------------------------------------- # Note # person_status = { 1: T("missing"), 2: T("found"), 3: T("deceased"), 9: T("none") } resourcename = "note" tablename = "pr_note" table = self.define_table(tablename, self.super_link("pe_id", "pr_pentity"), # Reporter #person_id("reporter"), Field("confirmed", "boolean", default=False, readable=False, writable=False), Field("closed", "boolean", default=False, readable=False, writable=False), Field("status", "integer", requires=IS_IN_SET(person_status, zero=None), default=9, label=T("Status"), represent=lambda opt: \ person_status.get(opt, UNKNOWN_OPT)), Field("timestmp", "datetime", label=T("Date/Time"), requires=[IS_EMPTY_OR(IS_UTC_DATETIME_IN_RANGE())], widget = S3DateTimeWidget(), default=request.utcnow), Field("note_text", "text", label=T("Text")), Field("note_contact", "text", label=T("Contact Info"), readable=False, writable=False), location_id(label=T("Last known location")), *s3.meta_fields()) # CRUD strings ADD_NOTE = T("New Entry") s3.crud_strings[tablename] = Storage( title_create = ADD_NOTE, title_display = T("Journal Entry Details"), title_list = T("Journal"), title_update = T("Edit Entry"), title_search = T("Search Entries"), subtitle_create = T("Add New Entry"), subtitle_list = T("Current Entries"), label_list_button = T("See All Entries"), label_create_button = ADD_NOTE, msg_record_created = T("Journal entry added"), msg_record_modified = T("Journal entry updated"), msg_record_deleted = T("Journal entry deleted"), msg_list_empty = T("No entry available")) # Resource configuration self.configure(tablename, list_fields=["id", "timestmp", "location_id", "note_text", "status"], editable=False, onaccept=self.note_onaccept, ondelete=self.note_onaccept) # ===================================================================== # Physical Description # pr_race_opts = { 1: T("caucasoid"), 2: T("mongoloid"), 3: T("negroid"), 99: T("other") } pr_complexion_opts = { 1: T("light"), 2: T("medium"), 3: T("dark"), 99: T("other") } pr_height_opts = { 1: T("short"), 2: T("average"), 3: T("tall") } pr_weight_opts = { 1: T("slim"), 2: T("average"), 3: T("fat") } # http://docs.oasis-open.org/emergency/edxl-have/cs01/xPIL-types.xsd pr_blood_type_opts = ("A+", "A-", "B+", "B-", "AB+", "AB-", "0+", "0-") pr_eye_color_opts = { 1: T("blue"), 2: T("grey"), 3: T("green"), 4: T("brown"), 5: T("black"), 99: T("other") } pr_hair_color_opts = { 1: T("blond"), 2: T("brown"), 3: T("black"), 4: T("red"), 5: T("grey"), 6: T("white"), 99: T("see comment") } pr_hair_style_opts = { 1: T("straight"), 2: T("wavy"), 3: T("curly"), 99: T("see comment") } pr_hair_length_opts = { 1: T("short<6cm"), 2: T("medium<12cm"), 3: 
T("long>12cm"), 4: T("shaved"), 99: T("see comment") } pr_hair_baldness_opts = { 1: T("forehead"), 2: T("sides"), 3: T("tonsure"), 4: T("total"), 99: T("see comment") } pr_facial_hair_type_opts = { 1: T("none"), 2: T("Moustache"), 3: T("Goatee"), 4: T("Whiskers"), 5: T("Full beard"), 99: T("see comment") } pr_facial_hair_length_opts = { 1: T("short"), 2: T("medium"), 3: T("long"), 4: T("shaved") } # This set is suitable for use in the US pr_ethnicity_opts = [ "American Indian", "Alaskan", "Asian", "African American", "Hispanic or Latino", "Native Hawaiian", "Pacific Islander", "Two or more", "Unspecified", "White" ] resourcename = "physical_description" tablename = "pr_physical_description" table = self.define_table(tablename, self.super_link("pe_id", "pr_pentity"), # Race and complexion Field("race", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_race_opts)), label = T("Race"), represent = lambda opt: \ pr_race_opts.get(opt, UNKNOWN_OPT)), Field("complexion", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_complexion_opts)), label = T("Complexion"), represent = lambda opt: \ pr_complexion_opts.get(opt, UNKNOWN_OPT)), Field("ethnicity", #requires=IS_NULL_OR(IS_IN_SET(pr_ethnicity_opts)), length=64), # Mayon Compatibility # Height and weight Field("height", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_height_opts)), label = T("Height"), represent = lambda opt: \ pr_height_opts.get(opt, UNKNOWN_OPT)), Field("height_cm", "integer", requires = IS_NULL_OR(IS_INT_IN_RANGE(0, 300)), label = T("Height (cm)")), Field("weight", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_weight_opts)), label = T("Weight"), represent = lambda opt: \ pr_weight_opts.get(opt, UNKNOWN_OPT)), Field("weight_kg", "integer", requires = IS_NULL_OR(IS_INT_IN_RANGE(0, 500)), label = T("Weight (kg)")), # Blood type, eye color Field("blood_type", requires = IS_EMPTY_OR(IS_IN_SET(pr_blood_type_opts)), label = T("Blood Type (AB0)"), represent = lambda opt: opt or UNKNOWN_OPT), Field("eye_color", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_eye_color_opts)), label = T("Eye Color"), represent = lambda opt: \ pr_eye_color_opts.get(opt, UNKNOWN_OPT)), # Hair of the head Field("hair_color", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_hair_color_opts)), label = T("Hair Color"), represent = lambda opt: \ pr_hair_color_opts.get(opt, UNKNOWN_OPT)), Field("hair_style", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_hair_style_opts)), label = T("Hair Style"), represent = lambda opt: \ pr_hair_style_opts.get(opt, UNKNOWN_OPT)), Field("hair_length", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_hair_length_opts)), label = T("Hair Length"), represent = lambda opt: \ pr_hair_length_opts.get(opt, UNKNOWN_OPT)), Field("hair_baldness", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_hair_baldness_opts)), label = T("Baldness"), represent = lambda opt: \ pr_hair_baldness_opts.get(opt, UNKNOWN_OPT)), Field("hair_comment"), # Facial hair Field("facial_hair_type", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_facial_hair_type_opts)), label = T("Facial hair, type"), represent = lambda opt: \ pr_facial_hair_type_opts.get(opt, UNKNOWN_OPT)), Field("facial_hair_color", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_hair_color_opts)), label = T("Facial hair, color"), represent = lambda opt: \ pr_hair_color_opts.get(opt, UNKNOWN_OPT)), Field("facial_hair_length", "integer", requires = IS_EMPTY_OR(IS_IN_SET(pr_facial_hair_length_opts)), label = T("Facial hear, length"), represent = lambda opt: \ pr_facial_hair_length_opts.get(opt, UNKNOWN_OPT)), 
Field("facial_hair_comment"), # Body hair and skin marks Field("body_hair"), Field("skin_marks", "text"), # Medical Details: scars, amputations, implants Field("medical_conditions", "text"), # Other details Field("other_details", "text"), s3.comments(), *s3.meta_fields()) # Field configuration table.height_cm.comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Height"), T("The body height (crown to heel) in cm.")))) table.weight_kg.comment = DIV(DIV(_class="tooltip", _title="%s|%s" % (T("Weight"), T("The weight in kg.")))) table.pe_id.readable = False table.pe_id.writable = False # CRUD Strings # ? # Resource Configuration # ? # ------------------------------------------------------------------------- @staticmethod def note_onaccept(form): """ Update missing status for person """ db = current.db s3db = current.s3db pe_table = s3db.pr_pentity ntable = s3db.pr_note ptable = s3db.pr_person if isinstance(form, (int, long, str)): _id = form elif hasattr(form, "vars"): _id = form.vars.id else: _id = form.id note = ntable[_id] if not note: return query = (ntable.pe_id == note.pe_id) & \ (ntable.deleted != True) mq = query & ntable.status == 1 fq = query & ntable.status.belongs((2, 3)) mr = db(mq).select(ntable.id, ntable.timestmp, orderby=~ntable.timestmp, limitby=(0, 1)).first() fr = db(fq).select(ntable.id, ntable.timestmp, orderby=~ntable.timestmp, limitby=(0, 1)).first() missing = False if mr and not fr or fr.timestmp < mr.timestmp: missing = True query = ptable.pe_id == note.pe_id db(query).update(missing=missing) if note.deleted: try: location_id = form.location_id except: pass else: ttable = s3db.sit_presence query = (ptable.pe_id == note.pe_id) & \ (ttable.uuid == ptable.uuid) & \ (ttable.location_id == location_id) & \ (ttable.timestmp == note.timestmp) if note.location_id: tracker = S3Tracker() tracker(query).set_location(note.location_id, timestmp=note.timestmp) return # ============================================================================= # ============================================================================= # Representation Methods # def pr_pentity_represent(id, show_label=True, default_label="[No ID Tag]"): """ Represent a Person Entity in option fields or list views """ T = current.T db = current.db s3db = current.s3db if not id: return current.messages.NONE pe_str = T("None (no such record)") pe_table = s3db.pr_pentity pe = db(pe_table.pe_id == id).select(pe_table.instance_type, pe_table.pe_label, limitby=(0, 1)).first() if not pe: return pe_str instance_type = pe.instance_type instance_type_nice = pe_table.instance_type.represent(instance_type) table = s3db.table(instance_type, None) if not table: return pe_str label = pe.pe_label or default_label if instance_type == "pr_person": person = db(table.pe_id == id).select( table.first_name, table.middle_name, table.last_name, limitby=(0, 1)).first() if person: if show_label: pe_str = "%s %s (%s)" % (s3_fullname(person), label, instance_type_nice) else: pe_str = "%s (%s)" % (s3_fullname(person), instance_type_nice) elif instance_type == "pr_group": group = db(table.pe_id == id).select(table.name, limitby=(0, 1)).first() if group: pe_str = "%s (%s)" % (group.name, instance_type_nice) elif instance_type == "org_organisation": organisation = db(table.pe_id == id).select(table.name, limitby=(0, 1)).first() if organisation: pe_str = "%s (%s)" % (organisation.name, instance_type_nice) elif instance_type == "org_office": office = db(table.pe_id == id).select(table.name, limitby=(0, 1)).first() if office: pe_str = "%s 
(%s)" % (office.name, instance_type_nice) else: pe_str = "[%s] (%s)" % (label, instance_type_nice) return pe_str # ============================================================================= def pr_person_represent(id): """ Representation """ if not id: return current.messages.NONE db = current.db s3db = current.s3db cache = current.cache table = s3db.pr_person def _represent(id): if isinstance(id, Row): person = id id = person.id else: person = db(table.id == id).select(table.first_name, table.middle_name, table.last_name, limitby=(0, 1)).first() if person: return s3_fullname(person) else: return None name = cache.ram("pr_person_%s" % id, lambda: _represent(id), time_expire=10) return name # ============================================================================= def pr_person_comment(title=None, comment=None, caller=None, child=None): T = current.T if title is None: title = T("Person") if comment is None: comment = T("Type the first few characters of one of the Person's names.") if child is None: child = "person_id" return s3_popup_comment(c="pr", f="person", vars=dict(caller=caller, child=child), title=current.messages.ADD_PERSON, tooltip="%s|%s" % (title, comment)) # ============================================================================= def pr_contacts(r, **attr): """ Custom Method to provide the details for the Person's Contacts Tab: - provides a single view on: Addresses (pr_address) Contacts (pr_contact) Emergency Contacts @ToDo: Fix Map in Address' LocationSelector @ToDo: Allow Address Create's LocationSelector to work in Debug mode """ from itertools import groupby if r.http != "GET": r.error(405, current.manager.ERROR.BAD_METHOD) T = current.T db = current.db s3db = current.s3db person = r.record # Addresses atable = s3db.pr_address query = (atable.pe_id == person.pe_id) addresses = db(query).select(atable.id, atable.type, atable.building_name, atable.address, atable.postcode, orderby=atable.type) address_groups = {} for key, group in groupby(addresses, lambda a: a.type): address_groups[key] = list(group) address_wrapper = DIV(H2(T("Addresses")), DIV(A(T("Add"), _class="addBtn", _id="address-add"), _class="margin")) items = address_groups.items() opts = s3db.pr_address_type_opts for address_type, details in items: address_wrapper.append(H3(opts[address_type])) for detail in details: building_name = detail.building_name or "" if building_name: building_name = "%s, " % building_name address = detail.address or "" if address: address = "%s, " % address postcode = detail.postcode or "" address_wrapper.append(P( SPAN("%s%s%s" % (building_name, address, postcode)), A(T("Edit"), _class="editBtn fright"), _id="address-%s" % detail.id, _class="address", )) # Contacts ctable = s3db.pr_contact query = (ctable.pe_id == person.pe_id) contacts = db(query).select(ctable.id, ctable.value, ctable.contact_method, orderby=ctable.contact_method) contact_groups = {} for key, group in groupby(contacts, lambda c: c.contact_method): contact_groups[key] = list(group) contacts_wrapper = DIV(H2(T("Contacts")), DIV(A(T("Add"), _class="addBtn", _id="contact-add"), _class="margin")) items = contact_groups.items() opts = current.msg.CONTACT_OPTS for contact_type, details in items: contacts_wrapper.append(H3(opts[contact_type])) for detail in details: contacts_wrapper.append(P( SPAN(detail.value), A(T("Edit"), _class="editBtn fright"), _id="contact-%s" % detail.id, _class="contact", )) # Emergency Contacts etable = s3db.pr_contact_emergency query = (etable.pe_id == person.pe_id) & \ 
(etable.deleted == False) emergency = db(query).select(etable.id, etable.name, etable.relationship, etable.phone) emergency_wrapper = DIV(H2(T("Emergency Contacts")), DIV(A(T("Add"), _class="addBtn", _id="emergency-add"), _class="margin")) for contact in emergency: name = contact.name or "" if name: name = "%s, " % name relationship = contact.relationship or "" if relationship: relationship = "%s, " % relationship emergency_wrapper.append(P( SPAN("%s%s%s" % (name, relationship, contact.phone)), A(T("Edit"), _class="editBtn fright"), _id="emergency-%s" % contact.id, _class="emergency", )) # Overall content content = DIV(address_wrapper, contacts_wrapper, emergency_wrapper, _class="contacts-wrapper") # Add the javascript response = current.response s3 = response.s3 s3.scripts.append(URL(c="static", f="scripts", args=["S3", "s3.contacts.js"])) s3.js_global.append("personId = %s;" % person.id) # Custom View response.view = "pr/contacts.html" # RHeader for consistency controller = current.request.controller if controller == "hrm": rheader = s3db.hrm_rheader(r) elif controller == "member": rheader = s3db.member_rheader(r) elif controller == "pr": rheader = s3db.pr_rheader(r) return dict( title = T("Contacts"), rheader = rheader, content = content, ) # ============================================================================= def pr_profile(r, **attr): """ Custom Method to provide the auth_user profile as a Tab of the Person """ if r.http != "GET": r.error(405, current.manager.ERROR.BAD_METHOD) T = current.T db = current.db s3db = current.s3db response = current.response person = r.record # Profile ltable = s3db.pr_person_user query = (ltable.pe_id == person.pe_id) form = db(query).select(limitby=(0, 1)).first() # Custom View response.view = "pr/profile.html" # RHeader for consistency rheader = s3db.hrm_rheader(r) return dict( title = T("Profile"), rheader = rheader, form = form, ) # ============================================================================= def pr_rheader(r, tabs=[]): """ Person Registry resource headers - used in PR, HRM, DVI, MPR, MSG, VOL """ T = current.T db = current.db s3db = current.s3db gis = current.gis s3 = current.response.s3 tablename, record = s3_rheader_resource(r) if r.representation == "html": rheader_tabs = s3_rheader_tabs(r, tabs) if tablename == "pr_person": person = record if person: s3 = current.response.s3 ptable = r.table itable = s3db.pr_image query = (itable.pe_id == record.pe_id) & \ (itable.profile == True) image = db(query).select(itable.image, limitby=(0, 1)).first() if image: image = TD(itable.image.represent(image.image), _rowspan=3) else: image = "" rheader = DIV(TABLE( TR(TH("%s: " % T("Name")), s3_fullname(person), TH("%s: " % T("ID Tag Number")), "%(pe_label)s" % person, image), TR(TH("%s: " % T("Date of Birth")), "%s" % (person.date_of_birth or T("unknown")), TH("%s: " % T("Gender")), "%s" % s3.pr_gender_opts.get(person.gender, T("unknown"))), TR(TH("%s: " % T("Nationality")), "%s" % (gis.get_country(person.nationality, key_type="code") or T("unknown")), TH("%s: " % T("Age Group")), "%s" % s3.pr_age_group_opts.get(person.age_group, T("unknown"))), ), rheader_tabs) return rheader elif tablename == "pr_group": group = record if group: table = s3db.pr_group_membership query = (table.group_id == record.id) & \ (table.group_head == True) leader = db(query).select(table.person_id, limitby=(0, 1)).first() if leader: leader = s3_fullname(leader.person_id) else: leader = "" rheader = DIV(TABLE( TR(TH("%s: " % T("Name")), group.name, TH("%s: " % T("Leader")) if leader else
"", leader), TR(TH("%s: " % T("Description")), group.description or "", TH(""), "") ), rheader_tabs) return rheader return None # ============================================================================= # Affiliation Callbacks # ============================================================================= # def pr_update_affiliations(table, record): """ Update all affiliations related to this record """ if hasattr(table, "_tablename"): rtype = table._tablename else: rtype = table db = current.db s3db = current.s3db if rtype == "hrm_human_resource": # Get the HR record htable = s3db.hrm_human_resource if not isinstance(record, Row): record = db(htable.id == record).select(htable.ALL, limitby=(0, 1)).first() if not record: return # Find the person_ids to update update = pr_human_resource_update_affiliations person_id = None if record.deleted_fk: try: person_id = json.loads(record.deleted_fk)["person_id"] except: pass if person_id: update(person_id) if person_id != record.person_id: person_id = record.person_id if person_id: update(person_id) elif rtype == "pr_group_membership": mtable = s3db.pr_group_membership if not isinstance(record, Row): record = db(mtable.id == record).select(mtable.ALL, limitby=(0, 1)).first() if not record: return pr_group_update_affiliations(record) elif rtype == "org_organisation_branch": ltable = s3db.org_organisation_branch if not isinstance(record, Row): record = db(ltable.id == record).select(ltable.ALL, limitby=(0, 1)).first() if not record: return pr_organisation_update_affiliations(record) elif rtype == "org_site": pr_site_update_affiliations(record) return # ============================================================================= def pr_organisation_update_affiliations(record): db = current.db s3db = current.s3db if record.deleted and record.deleted_fk: try: fk = json.loads(record.deleted_fk) branch_id = fk["branch_id"] except: return else: branch_id = record.branch_id BRANCHES = "Branches" otable = s3db.org_organisation btable = otable.with_alias("branch") ltable = s3db.org_organisation_branch rtable = s3db.pr_role atable = s3db.pr_affiliation etable = s3db.pr_pentity o = otable._tablename b = btable._tablename r = rtable._tablename # Get current memberships query = (ltable.branch_id == branch_id) & \ (ltable.deleted != True) left = [otable.on(ltable.organisation_id == otable.id), btable.on(ltable.branch_id == btable.id)] rows = db(query).select(otable.pe_id, btable.pe_id, left=left) current_memberships = [(row[o].pe_id, row[b].pe_id) for row in rows] # Get current affiliations query = (rtable.deleted != True) & \ (rtable.role == BRANCHES) & \ (rtable.pe_id == etable.pe_id) & \ (etable.instance_type == o) & \ (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (atable.pe_id == btable.pe_id) & \ (btable.id == branch_id) rows = db(query).select(rtable.pe_id, btable.pe_id) current_affiliations = [(row[r].pe_id, row[b].pe_id) for row in rows] # Remove all affiliations which are not current memberships for a in current_affiliations: org, branch = a if a not in current_memberships: pr_remove_affiliation(org, branch, role=BRANCHES) else: current_memberships.remove(a) # Add affiliations for all new memberships for m in current_memberships: org, branch = m pr_add_affiliation(org, branch, role=BRANCHES, role_type=OU) return # ============================================================================= def pr_group_update_affiliations(record): """ Update affiliations for group memberships, currently this makes all members of a group 
organisational units of the group. @param record: the membership record """ db = current.db s3db = current.s3db MEMBERS = "Members" if record.deleted and record.deleted_fk: try: fk = json.loads(record.deleted_fk) person_id = fk["person_id"] except: return else: person_id = record.person_id ptable = s3db.pr_person gtable = s3db.pr_group mtable = s3db.pr_group_membership rtable = s3db.pr_role atable = s3db.pr_affiliation etable = s3db.pr_pentity g = gtable._tablename r = rtable._tablename p = ptable._tablename # Get current memberships query = (mtable.person_id == person_id) & \ (mtable.deleted != True) left = [ptable.on(mtable.person_id == ptable.id), gtable.on(mtable.group_id == gtable.id)] rows = db(query).select(ptable.pe_id, gtable.pe_id, left=left) current_memberships = [(row[g].pe_id, row[p].pe_id) for row in rows] # Get current affiliations query = (rtable.deleted != True) & \ (rtable.role == MEMBERS) & \ (rtable.pe_id == etable.pe_id) & \ (etable.instance_type == g) & \ (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (atable.pe_id == ptable.pe_id) & \ (ptable.id == person_id) rows = db(query).select(ptable.pe_id, rtable.pe_id) current_affiliations = [(row[r].pe_id, row[p].pe_id) for row in rows] # Remove all affiliations which are not current memberships for a in current_affiliations: group, person = a if a not in current_memberships: pr_remove_affiliation(group, person, role=MEMBERS) else: current_memberships.remove(a) # Add affiliations for all new memberships for m in current_memberships: group, person = m pr_add_affiliation(group, person, role=MEMBERS, role_type=OU) return # ============================================================================= def pr_site_update_affiliations(record): """ Update the affiliations of an org_site instance @param record: the instance record """ db = current.db s3db = current.s3db SITES = "Sites" otable = s3db.org_organisation stable = s3db.org_site rtable = s3db.pr_role ptable = s3db.pr_pentity atable = s3db.pr_affiliation o_pe_id = None s_pe_id = record.pe_id organisation_id = record.organisation_id if organisation_id: org = db(otable.id == organisation_id).select(otable.pe_id, limitby=(0, 1)).first() if org: o_pe_id = org.pe_id if s_pe_id: query = (atable.deleted != True) & \ (atable.pe_id == s_pe_id) & \ (rtable.deleted != True) & \ (rtable.id == atable.role_id) & \ (rtable.role == SITES) & \ (ptable.pe_id == rtable.pe_id) & \ (ptable.instance_type == str(otable)) rows = db(query).select(rtable.pe_id) seen = False for row in rows: if o_pe_id == None or o_pe_id != row.pe_id: pr_remove_affiliation(row.pe_id, s_pe_id, role=SITES) elif o_pe_id == row.pe_id: seen = True if o_pe_id and not seen: pr_add_affiliation(o_pe_id, s_pe_id, role=SITES, role_type=OU) return # ============================================================================= def pr_human_resource_update_affiliations(person_id): """ Update all affiliations related to the HR records of a person @param person_id: the person record ID """ db = current.db s3db = current.s3db STAFF = "Staff" VOLUNTEER = "Volunteer" update = pr_human_resource_update_affiliations etable = s3db.pr_pentity ptable = s3db.pr_person rtable = s3db.pr_role atable = s3db.pr_affiliation htable = s3db.hrm_human_resource otable = s3db.org_organisation stable = s3db.org_site h = htable._tablename s = stable._tablename o = otable._tablename r = rtable._tablename e = etable._tablename # Get the PE-ID for this person pe_id = s3db.pr_get_pe_id("pr_person", person_id) # Get all current HR records query = 
(htable.person_id == person_id) & \ (htable.status == 1) & \ (htable.type.belongs((1,2))) & \ (htable.deleted != True) left = [otable.on(htable.organisation_id == otable.id), stable.on(htable.site_id == stable.site_id)] rows = db(query).select(htable.site_id, htable.type, otable.pe_id, stable.uuid, stable.instance_type, left=left) # Extract all master PE's masters = {STAFF:[], VOLUNTEER:[]} sites = Storage() for row in rows: if row[h].type == 1: role = STAFF site_id = row[h].site_id site_pe_id = None if site_id and site_id not in sites: itable = s3db.table(row[s].instance_type, None) if itable and "pe_id" in itable.fields: q = itable.site_id == site_id site = db(q).select(itable.pe_id, limitby=(0, 1)).first() if site: site_pe_id = sites[site_id] = site.pe_id else: site_pe_id = sites[site_id] if site_pe_id and site_pe_id not in masters[role]: masters[role].append(site_pe_id) elif row[h].type == 2: role = VOLUNTEER else: continue org_pe_id = row[o].pe_id if org_pe_id and org_pe_id not in masters[role]: masters[role].append(org_pe_id) # Get all current affiliations query = (ptable.id == person_id) & \ (atable.deleted != True) & \ (atable.pe_id == ptable.pe_id) & \ (rtable.deleted != True) & \ (rtable.id == atable.role_id) & \ (rtable.role.belongs((STAFF, VOLUNTEER))) & \ (etable.pe_id == rtable.pe_id) & \ (etable.instance_type.belongs((o, s))) affiliations = db(query).select(rtable.id, rtable.pe_id, rtable.role, etable.instance_type) # Remove all affiliations which are not in masters for a in affiliations: pe = a[r].pe_id role = a[r].role if role in masters: if pe not in masters[role]: pr_remove_affiliation(pe, pe_id, role=role) else: masters[role].remove(pe) # Add affiliations to all masters which are not in current affiliations for role in masters: if role == VOLUNTEER: role_type = OTHER_ROLE else: role_type = OU for m in masters[role]: pr_add_affiliation(m, pe_id, role=role, role_type=role_type) return # ============================================================================= # Affiliation Helpers # ============================================================================= # def pr_add_affiliation(master, affiliate, role=None, role_type=OU): """ Add a new affiliation record @param master: the master entity, either as PE-ID or as tuple (instance_type, instance_id) @param affiliate: the affiliated entity, either as PE-ID or as tuple (instance_type, instance_id) @param role: the role to add the affiliate to (will be created if it doesn't yet exist) @param role_type: the type of the role, defaults to OU """ db = current.db s3db = current.s3db if not role: return master_pe = pr_get_pe_id(master) affiliate_pe = pr_get_pe_id(affiliate) if master_pe and affiliate_pe: rtable = s3db.pr_role query = (rtable.pe_id == master_pe) & \ (rtable.role == role) & \ (rtable.deleted != True) row = db(query).select(limitby=(0, 1)).first() if not row: data = {"pe_id": master_pe, "role": role, "role_type": role_type} role_id = rtable.insert(**data) else: role_id = row.id if role_id: pr_add_to_role(role_id, affiliate_pe) return # ============================================================================= def pr_remove_affiliation(master, affiliate, role=None): """ Remove affiliation records @param master: the master entity, either as PE-ID or as tuple (instance_type, instance_id), if this is None, then all affiliations with all entities will be removed @param affiliate: the affiliated entity, either as PE-ID or as tuple (instance_type, instance_id) @param affiliate: the affiliated PE, either as pe_id or 
as tuple (instance_type, instance_id) @param role: name of the role to remove the affiliate from, if None, the affiliate will be removed from all roles """ db = current.db s3db = current.s3db master_pe = pr_get_pe_id(master) affiliate_pe = pr_get_pe_id(affiliate) if affiliate_pe: atable = s3db.pr_affiliation rtable = s3db.pr_role query = (atable.pe_id == affiliate_pe) & \ (atable.role_id == rtable.id) if master_pe: query &= (rtable.pe_id == master_pe) if role: query &= (rtable.role == role) rows = db(query).select(rtable.id) for row in rows: pr_remove_from_role(row.id, affiliate_pe) return # ============================================================================= # PE Helpers # ============================================================================= # def pr_get_pe_id(entity, record_id=None): """ Get the PE-ID of an instance record @param entity: the entity, either a tablename, a tuple (tablename, record_id), a Row of the instance type, or a PE-ID @param record_id: the record ID, if entity is a tablename @returns: the PE-ID """ db = current.db s3db = current.s3db if record_id is not None: table, _id = entity, record_id elif isinstance(entity, (tuple, list)): table, _id = entity elif isinstance(entity, Row): if "pe_id" in entity: return entity["pe_id"] else: for f in entity.values(): if isinstance(f, Row) and "pe_id" in f: return f["pe_id"] return None else: return entity if not hasattr(table, "_tablename"): table = s3db.table(table, None) record = None if table: if "pe_id" in table.fields and _id: record = db(table._id==_id).select(table.pe_id, limitby=(0, 1)).first() elif _id: key = table._id.name if key == "pe_id": return _id if key != "id" and "instance_type" in table.fields: s = db(table._id==_id).select(table.instance_type, limitby=(0, 1)).first() else: return None if not s: return None table = s3db.table(s.instance_type, None) if table and "pe_id" in table.fields: record = db(table[key] == _id).select(table.pe_id, limitby=(0, 1)).first() else: return None if record: return record.pe_id return None # ============================================================================= # Back-end Role tools # ============================================================================= # def pr_define_role(pe_id, role=None, role_type=None, entity_type=None, sub_type=None): """ Back-end method to define a new affiliates-role for a person entity @param pe_id: the person entity ID @param role: the role name @param role_type: the role type (from pr_role_types), default 9 @param entity_type: limit selection in CRUD forms to this entity type @param sub_type: limit selection in CRUD forms to this entity sub-type """ db = current.db s3db = current.s3db if not pe_id: return if role_type not in s3db.pr_role_types: role_type = 9 # Other data = {"pe_id": pe_id, "role": role, "role_type": role_type, "entity_type": entity_type, "sub_type": sub_type} rtable = s3db.pr_role if role: query = (rtable.pe_id == pe_id) & \ (rtable.role == role) duplicate = db(query).select(rtable.id, rtable.role_type, limitby=(0, 1)).first() else: duplicate = None if duplicate: if duplicate.role_type != role_type: # Clear paths if this changes the role type if str(role_type) != str(OU): data["path"] = None s3db.pr_role_rebuild_path(duplicate.id, clear=True) duplicate.update_record(**data) record_id = duplicate.id else: record_id = rtable.insert(**data) return record_id # ============================================================================= def pr_delete_role(role_id): """ Back-end method to delete a role @param 
role_id: the role ID """ manager = current.manager resource = manager.define_resource("pr", "role", id=role_id) return resource.delete() # ============================================================================= def pr_add_to_role(role_id, pe_id): """ Back-end method to add a person entity to a role. @param role_id: the role ID @param pe_id: the person entity ID @todo: update descendant paths only if the role is a OU role """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation # Check for duplicate query = (atable.role_id == role_id) & (atable.pe_id == pe_id) affiliation = db(query).select(limitby=(0, 1)).first() if affiliation is None: # Insert affiliation record atable.insert(role_id=role_id, pe_id=pe_id) # Clear descendant paths (triggers lazy rebuild) pr_rebuild_path(pe_id, clear=True) return # ============================================================================= def pr_remove_from_role(role_id, pe_id): """ Back-end method to remove a person entity from a role. @param role_id: the role ID @param pe_id: the person entity ID @todo: update descendant paths only if the role is a OU role """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation query = (atable.role_id == role_id) & (atable.pe_id == pe_id) affiliation = db(query).select(limitby=(0, 1)).first() if affiliation is not None: # Soft-delete the record, clear foreign keys deleted_fk = {"role_id": role_id, "pe_id": pe_id} data = {"deleted": True, "role_id": None, "pe_id": None, "deleted_fk": json.dumps(deleted_fk)} affiliation.update_record(**data) # Clear descendant paths pr_rebuild_path(pe_id, clear=True) return # ============================================================================= # Back-end Path Tools # ============================================================================= # def pr_get_role_paths(pe_id, roles=None, role_types=None): """ Get the ancestor paths of the ancester OU's this person entity is affiliated with, sorted by roles @param pe_id: the person entity ID @param roles: list of roles to limit the search @param role_types: list of role types to limit the search @note: role_types is ignored if roles gets specified """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation rtable = s3db.pr_role query = (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (atable.pe_id == pe_id) & \ (rtable.deleted != True) if roles is not None: if not isinstance(roles, (list, tuple)): roles = [roles] query &= (rtable.role.belongs(roles)) elif role_types is not None: if not isinstance(role_types, (list, tuple)): role_types = [role_types] query &= (rtable.role_type.belongs(role_types)) rows = db(query).select(rtable.role, rtable.path, rtable.pe_id) role_paths = Storage() for role in rows: name = role.role if name in role_paths: multipath = role_paths[name] multipath.append([role.pe_id]) else: multipath = S3MultiPath([role.pe_id]) path = pr_get_path(role.pe_id) multipath.extend(role.pe_id, path, cut=pe_id) role_paths[name] = multipath.clean() return role_paths # ============================================================================= def pr_get_role_branches(pe_id, roles=None, role_types=None, entity_type=None): """ Get all descendants of the immediate ancestors of the entity within these roles/role types """ db = current.db s3db = current.s3db rtable = s3db.pr_role atable = s3db.pr_affiliation etable = s3db.pr_pentity rn = rtable._tablename en = etable._tablename query = (atable.deleted != True) & \ (atable.pe_id == pe_id) & \ (atable.role_id == rtable.id) & \ 
(rtable.pe_id == etable.pe_id) if roles is not None: if not isinstance(roles, (list, tuple)): roles = [roles] query &= (rtable.role.belongs(roles)) elif role_types is not None: if not isinstance(role_types, (list, tuple)): role_types = [role_types] query &= (rtable.role_type.belongs(role_types)) rows = db(query).select(rtable.pe_id, etable.instance_type) nodes = [r[rn].pe_id for r in rows] result = [r[rn].pe_id for r in rows if entity_type is None or r[en].instance_type == entity_type] branches = pr_get_descendants(nodes, entity_type=entity_type) return result+branches # ============================================================================= def pr_get_path(pe_id): """ Get all ancestor paths of a person entity @param pe_id: the person entity ID """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation rtable = s3db.pr_role query = (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (atable.pe_id == pe_id) & \ (rtable.deleted != True) & \ (rtable.role_type == OU) roles = db(query).select(rtable.ALL) multipath = S3MultiPath() append = multipath.append for role in roles: path = S3MultiPath([role.pe_id]) if role.path is None: ppath = pr_role_rebuild_path(role) else: ppath = S3MultiPath(role.path) path.extend(role.pe_id, ppath, cut=pe_id) for p in path.paths: append(p) return multipath.clean() # ============================================================================= def pr_get_ancestors(pe_id): """ Find all ancestor entities of a person entity in the OU hierarchy (performs a path lookup where paths are available, otherwise rebuilds paths). @param pe_id: the person entity ID @todo: be able to filter by type and subtype """ db = current.db s3db = current.s3db atable = s3db.pr_affiliation rtable = s3db.pr_role query = (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (atable.pe_id == pe_id) & \ (rtable.deleted != True) & \ (rtable.role_type == OU) roles = db(query).select(rtable.ALL) paths = [] append = paths.append for role in roles: path = S3MultiPath([role.pe_id]) if role.path is None: ppath = pr_role_rebuild_path(role) else: ppath = S3MultiPath(role.path) path.extend(role.pe_id, ppath, cut=pe_id) append(path) ancestors = S3MultiPath.all_nodes(paths) return ancestors # ============================================================================= def pr_get_descendants(pe_ids, skip=[], entity_type=None, ids=True): """ Find descendant entities of a person entity in the OU hierarchy (performs a real search, not a path lookup). 
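        Example (illustrative, with hypothetical PE-IDs): pr_get_descendants(org_pe_id) returns the PE-IDs of every entity below the organisation in the OU hierarchy, while pr_get_descendants(org_pe_id, entity_type="org_office") restricts the result to offices.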
@param pe_ids: person entity ID or list of IDs @param skip: list of person entity IDs to skip during descending @todo: be able to filter by type and subtype """ db = current.db s3db = current.s3db etable = s3db.pr_pentity rtable = s3db.pr_role atable = s3db.pr_affiliation en = etable._tablename an = atable._tablename if type(pe_ids) is not list: pe_ids = [pe_ids] pe_ids = [i for i in pe_ids if i not in skip] if not pe_ids: return [] query = (rtable.deleted != True) & \ (rtable.pe_id.belongs(pe_ids)) & \ (~(rtable.pe_id.belongs(skip))) & \ (rtable.role_type == OU) & \ (atable.deleted != True) & \ (atable.role_id == rtable.id) & \ (etable.pe_id == atable.pe_id) skip = skip + pe_ids rows = db(query).select(atable.pe_id, etable.instance_type) nodes = [(r[an].pe_id, r[en].instance_type) for r in rows] result = [] append = result.append for n in nodes: if n not in result: append(n) node_ids = [n[0] for n in result] descendants = pr_get_descendants(node_ids, skip=skip, ids=False) for d in descendants: if d not in result: append(d) if ids: return [n[0] for n in result if entity_type is None or n[1] == entity_type] else: return result # ============================================================================= # Internal Path Tools # ============================================================================= # def pr_rebuild_path(pe_id, clear=False): """ Rebuild the ancestor path of all roles in the OU hierarchy a person entity defines. @param pe_id: the person entity ID @param clear: clear paths in descendant roles (triggers lazy rebuild) """ db = current.db s3db = current.s3db if isinstance(pe_id, Row): pe_id = pe_id.pe_id rtable = s3db.pr_role query = (rtable.deleted != True) & \ (rtable.pe_id == pe_id) & \ (rtable.role_type == OU) db(query).update(path=None) roles = db(query).select() for role in roles: if role.path is None: pr_role_rebuild_path(role, clear=clear) return # ============================================================================= def pr_role_rebuild_path(role_id, skip=[], clear=False): """ Rebuild the ancestor path in a role within the OU hierarchy @param role_id: the role ID @param skip: list of role IDs to skip during recursion @param clear: clear paths in descendant roles (triggers lazy rebuild) """ db = current.db s3db = current.s3db rtable = s3db.pr_role atable = s3db.pr_affiliation if isinstance(role_id, Row): role = role_id role_id = role.id else: query = (rtable.deleted != True) & \ (rtable.id == role_id) role = db(query).select(limitby=(0, 1)).first() if not role: return None pe_id = role.pe_id if role_id in skip: return role.path skip = skip + [role_id] if role.role_type != OU: path = None else: # Get all parent roles query = (atable.deleted != True) & \ (atable.pe_id == pe_id) & \ (rtable.deleted != True) & \ (rtable.id == atable.role_id) & \ (rtable.role_type == OU) parent_roles = db(query).select(rtable.ALL) # Update ancestor path path = S3MultiPath() for prole in parent_roles: path.append([prole.pe_id]) if prole.path is None: ppath = pr_role_rebuild_path(prole, skip=skip) else: ppath = S3MultiPath(prole.path) if ppath is not None: path.extend(prole.pe_id, ppath, cut=pe_id) db(rtable.id == role_id).update(path=str(path)) # Clear descendant paths, if requested (only necessary for writes) if clear: query = (rtable.deleted != True) & \ (rtable.path.like("%%|%s|%%" % pe_id)) & \ (~(rtable.id.belongs(skip))) db(query).update(path=None) return path
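# -----------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the module): how the affiliation
# helpers above combine to build and query the OU hierarchy. The record IDs
# are hypothetical; "Sites" mirrors the role name used by
# pr_site_update_affiliations().
def _example_affiliation_roundtrip():
    # Resolve PE-IDs from (tablename, record_id) tuples
    org_pe = pr_get_pe_id(("org_organisation", 1))  # hypothetical record
    office_pe = pr_get_pe_id(("org_office", 4))     # hypothetical record
    # File the office under the organisation as an OU: pr_add_affiliation
    # creates the "Sites" role on demand and links the office into it
    pr_add_affiliation(org_pe, office_pe, role="Sites", role_type=OU)
    # Ancestor lookup walks the role paths (rebuilt lazily where path is None)
    return pr_get_ancestors(office_pe)

# END =========================================================================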
{ "content_hash": "cacc773dcea8fbf8af68d07127e6c8c8", "timestamp": "", "source": "github", "line_count": 3884, "max_line_length": 267, "avg_line_length": 40.48043254376931, "alnum_prop": 0.42929286504776565, "repo_name": "flavour/cedarbluff", "id": "e324effc6c425beda029dc64c6075158e0ec0804", "size": "157252", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/eden/pr.py", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "9763403" }, { "name": "PHP", "bytes": "15220" }, { "name": "Python", "bytes": "21560680" }, { "name": "Shell", "bytes": "1171" } ], "symlink_target": "" }
"""autogenerated by genpy from se306Project/PoseMsg.msg. Do not edit.""" import sys python3 = True if sys.hexversion > 0x03000000 else False import genpy import struct import geometry_msgs.msg import std_msgs.msg class PoseMsg(genpy.Message): _md5sum = "6e8ef06f4a76cd2ca058c153a18cd476" _type = "se306Project/PoseMsg" _has_header = False #flag to mark the presence of a Header object _full_text = """# A special position message which defines who the sender is # String sender: The type of robot sending the message # Possible Values: # farmer # sheepdog # sheep # grass # truck # Pose position: Position of the sender. # Pose2D messages are composed as: # float64 x # float64 y # float64 theta std_msgs/String sender geometry_msgs/Pose2D position ================================================================================ MSG: std_msgs/String string data ================================================================================ MSG: geometry_msgs/Pose2D # This expresses a position and orientation on a 2D manifold. float64 x float64 y float64 theta """ __slots__ = ['sender','position'] _slot_types = ['std_msgs/String','geometry_msgs/Pose2D'] def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommend use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: sender,position :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields. """ if args or kwds: super(PoseMsg, self).__init__(*args, **kwds) #message fields cannot be None, assign default values for those that are if self.sender is None: self.sender = std_msgs.msg.String() if self.position is None: self.position = geometry_msgs.msg.Pose2D() else: self.sender = std_msgs.msg.String() self.position = geometry_msgs.msg.Pose2D() def _get_types(self): """ internal API method """ return self._slot_types def serialize(self, buff): """ serialize message into buffer :param buff: buffer, ``StringIO`` """ try: _x = self.sender.data length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) buff.write(struct.pack('<I%ss'%length, length, _x)) _x = self buff.write(_struct_3d.pack(_x.position.x, _x.position.y, _x.position.theta)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize(self, str): """ unpack serialized message in str into this message instance :param str: byte array of serialized message, ``str`` """ try: if self.sender is None: self.sender = std_msgs.msg.String() if self.position is None: self.position = geometry_msgs.msg.Pose2D() end = 0 start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.sender.data = str[start:end].decode('utf-8') else: self.sender.data = str[start:end] _x = self start = end end += 24 (_x.position.x, _x.position.y, _x.position.theta,) = _struct_3d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill def serialize_numpy(self, buff, numpy): """ serialize message with numpy array types into buffer :param buff: buffer, ``StringIO`` :param numpy: numpy python module """ try: _x = self.sender.data length = len(_x) if python3 or type(_x) == unicode: _x = _x.encode('utf-8') length = len(_x) 
buff.write(struct.pack('<I%ss'%length, length, _x)) _x = self buff.write(_struct_3d.pack(_x.position.x, _x.position.y, _x.position.theta)) except struct.error as se: self._check_types(se) except TypeError as te: self._check_types(te) def deserialize_numpy(self, str, numpy): """ unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module """ try: if self.sender is None: self.sender = std_msgs.msg.String() if self.position is None: self.position = geometry_msgs.msg.Pose2D() end = 0 start = end end += 4 (length,) = _struct_I.unpack(str[start:end]) start = end end += length if python3: self.sender.data = str[start:end].decode('utf-8') else: self.sender.data = str[start:end] _x = self start = end end += 24 (_x.position.x, _x.position.y, _x.position.theta,) = _struct_3d.unpack(str[start:end]) return self except struct.error as e: raise genpy.DeserializationError(e) #most likely buffer underfill _struct_I = genpy.struct_I _struct_3d = struct.Struct("<3d")
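# --- Editor's usage sketch (not part of the generated file) ------------------
# A serialize()/deserialize() round trip, assuming a sourced ROS environment
# so that genpy, std_msgs and geometry_msgs import cleanly.
def _pose_msg_roundtrip_demo():
  from io import BytesIO
  msg = PoseMsg()
  msg.sender.data = 'sheepdog'  # one of the sender types documented above
  msg.position.x, msg.position.y, msg.position.theta = 1.0, 2.0, 0.5
  buff = BytesIO()
  msg.serialize(buff)  # length-prefixed UTF-8 string followed by 3 doubles
  clone = PoseMsg()
  clone.deserialize(buff.getvalue())
  assert clone.sender.data == msg.sender.data
  assert clone.position.theta == msg.position.theta
  return clone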
{ "content_hash": "6906e2cd081fc7ab84bf3b41b8b4cf2b", "timestamp": "", "source": "github", "line_count": 172, "max_line_length": 92, "avg_line_length": 30.593023255813954, "alnum_prop": 0.6180159635119726, "repo_name": "TomHulme/306-Swarm-Robotics-Project", "id": "accc9457ea3659dfaac70c55e314ef382af42c97", "size": "5262", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "se306Project/src/se306Project/msg/_PoseMsg.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "928" }, { "name": "C++", "bytes": "52863" }, { "name": "Python", "bytes": "39718" } ], "symlink_target": "" }
import argparse
import os

import manifest
from . import vcs
from .log import get_logger

here = os.path.dirname(__file__)

wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))


def update(tests_root, manifest, working_copy=False):
    tree = None
    if not working_copy:
        tree = vcs.Git.for_path(tests_root, manifest.url_base)
    if tree is None:
        tree = vcs.FileSystem(tests_root, manifest.url_base)

    return manifest.update(tree)


def update_from_cli(**kwargs):
    tests_root = kwargs["tests_root"]
    path = kwargs["path"]
    assert tests_root is not None

    m = None
    logger = get_logger()

    if not kwargs.get("rebuild", False):
        try:
            m = manifest.load(tests_root, path)
        except manifest.ManifestVersionMismatch:
            logger.info("Manifest version changed, rebuilding")
            m = None
        else:
            logger.info("Updating manifest")

    if m is None:
        m = manifest.Manifest(kwargs["url_base"])

    changed = update(tests_root, m, working_copy=kwargs["work"])
    if changed:
        manifest.write(m, path)


def abs_path(path):
    return os.path.abspath(os.path.expanduser(path))


def create_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-p", "--path", type=abs_path, help="Path to manifest file.")
    parser.add_argument(
        "--tests-root", type=abs_path, default=wpt_root,
        help="Path to root of tests.")
    parser.add_argument(
        "-r", "--rebuild", action="store_true", default=False,
        help="Force a full rebuild of the manifest.")
    parser.add_argument(
        "--work", action="store_true", default=False,
        help="Build from the working tree rather than the latest commit")
    parser.add_argument(
        "--url-base", action="store", default="/",
        help="Base url to use as the mount point for tests in this manifest.")
    return parser


def find_top_repo():
    path = here
    rv = None
    while path != "/":
        if vcs.is_git_repo(path):
            rv = path
        path = os.path.abspath(os.path.join(path, os.pardir))

    return rv


def run(**kwargs):
    if kwargs["path"] is None:
        kwargs["path"] = os.path.join(kwargs["tests_root"], "MANIFEST.json")

    update_from_cli(**kwargs)


def main():
    opts = create_parser().parse_args()

    run(**vars(opts))
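# --- Editor's note (not part of the original file) ---------------------------
# Typical invocations, with hypothetical paths; the flag names come from
# create_parser() above, and the module path depends on the checkout layout:
#
#   python -m tools.manifest.update                 # incremental update
#   python -m tools.manifest.update -r              # force a full rebuild
#   python -m tools.manifest.update --work          # use the working tree
#
# Programmatic use mirrors the CLI keywords:
#
#   run(tests_root='/path/to/wpt', path=None, rebuild=False, work=False,
#       url_base='/')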
{ "content_hash": "44c742c63b6220798a287cd085ab3d8c", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 87, "avg_line_length": 25.705263157894738, "alnum_prop": 0.6101556101556102, "repo_name": "youtube/cobalt_sandbox", "id": "0bbbefb9111c077589d2fab23c7f289f530067b9", "size": "2464", "binary": false, "copies": "4", "ref": "refs/heads/main", "path": "third_party/web_platform_tests/tools/manifest/update.py", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
from settings import *

DEBUG = CAPTCHA_TEST_MODE = TEMPLATE_DEBUG = False

ALLOWED_HOSTS = ['127.0.0.1', 'localhost', '.freeward.ca']
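# --- Editor's note (not part of the original file) ---------------------------
# Select this module in production via DJANGO_SETTINGS_MODULE; the WSGI entry
# point name below is an assumption about the project layout:
#
#   DJANGO_SETTINGS_MODULE=app.settings_prod gunicorn app.wsgi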
{ "content_hash": "42fbcdc529859d2656a426c77a79171b", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 58, "avg_line_length": 33.5, "alnum_prop": 0.6940298507462687, "repo_name": "vollov/django-blog", "id": "a99a1c41fe310827070a8c3b406bc2a0dee762e4", "size": "134", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/settings_prod.py", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "2090" }, { "name": "HTML", "bytes": "9094" }, { "name": "JavaScript", "bytes": "6583" }, { "name": "Python", "bytes": "38462" } ], "symlink_target": "" }
import absl.testing
import numpy
from . import test_util

model_path = "https://tfhub.dev/sayakpaul/lite-model/east-text-detector/dr/1?lite-format=tflite"


class EastTextDetectorTest(test_util.TFLiteModelTest):

  def __init__(self, *args, **kwargs):
    super(EastTextDetectorTest, self).__init__(model_path, *args, **kwargs)

  def compare_results(self, iree_results, tflite_results, details):
    super(EastTextDetectorTest, self).compare_results(iree_results,
                                                      tflite_results, details)
    self.assertTrue(
        numpy.isclose(iree_results[0], tflite_results[0], atol=1e-3).all())

    # The second return is extremely noisy as it is not a binary
    # classification. To handle this, we check normalized correlation with an
    # expectation of "close enough".
    iree_norm = numpy.sqrt(iree_results[1] * iree_results[1])
    tflite_norm = numpy.sqrt(tflite_results[1] * tflite_results[1])

    correlation = numpy.average(iree_results[1] * tflite_results[1] /
                                iree_norm / tflite_norm)
    self.assertTrue(numpy.isclose(correlation, 1.0, atol=1e-2).all())

  def test_compile_tflite(self):
    self.compile_and_execute()


if __name__ == '__main__':
  absl.testing.absltest.main()
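# --- Editor's note (not part of the original test) ----------------------------
# Since sqrt(x * x) is just |x| elementwise, the "correlation" above is the
# mean of elementwise sign agreement rather than a true normalized
# cross-correlation. A stricter, hypothetical variant would be:
#
#   def normalized_cross_correlation(a, b):
#     a, b = numpy.ravel(a), numpy.ravel(b)
#     return float(numpy.dot(a, b) /
#                  (numpy.linalg.norm(a) * numpy.linalg.norm(b)))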
{ "content_hash": "edf8b6ac55c635b016bdb3999b5eef30", "timestamp": "", "source": "github", "line_count": 33, "max_line_length": 96, "avg_line_length": 38.27272727272727, "alnum_prop": 0.6642913697545526, "repo_name": "google/iree", "id": "b5d9f26470e4cce9ff3c213fe5ef94d87c08a5c6", "size": "1481", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "integrations/tensorflow/test/python/iree_tfl_tests/east_text_detector_test.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Assembly", "bytes": "23010" }, { "name": "Batchfile", "bytes": "353" }, { "name": "C", "bytes": "3830546" }, { "name": "C++", "bytes": "8161374" }, { "name": "CMake", "bytes": "899403" }, { "name": "Dockerfile", "bytes": "28245" }, { "name": "GLSL", "bytes": "2629" }, { "name": "HTML", "bytes": "31018" }, { "name": "Java", "bytes": "31697" }, { "name": "JavaScript", "bytes": "18714" }, { "name": "MLIR", "bytes": "5606822" }, { "name": "NASL", "bytes": "3852" }, { "name": "PowerShell", "bytes": "7893" }, { "name": "Python", "bytes": "1143963" }, { "name": "Shell", "bytes": "248374" }, { "name": "Starlark", "bytes": "600260" } ], "symlink_target": "" }
__version__ = '0.10.2'

import logging

try:
    # not available in python 2.6
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())

from pika.connection import ConnectionParameters
from pika.connection import URLParameters
from pika.credentials import PlainCredentials
from pika.spec import BasicProperties

from pika.adapters import BaseConnection
from pika.adapters import BlockingConnection
from pika.adapters import SelectConnection
from pika.adapters import TornadoConnection
from pika.adapters import TwistedConnection
from pika.adapters import LibevConnection
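# --- Editor's usage sketch (not part of the original file) -------------------
# The classic "hello world" publish using the names exported above; assumes a
# RabbitMQ broker reachable on localhost:
#
#   import pika
#   connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
#   channel = connection.channel()
#   channel.queue_declare(queue='hello')
#   channel.basic_publish(exchange='', routing_key='hello', body='Hello World!')
#   connection.close()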
{ "content_hash": "decb93d8ac196755ddd458d19143f62a", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 53, "avg_line_length": 28.22222222222222, "alnum_prop": 0.7979002624671916, "repo_name": "knowsis/pika", "id": "602b6a31e75306ee62ed662c8849171012c3dd6d", "size": "762", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pika/__init__.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Python", "bytes": "761321" } ], "symlink_target": "" }
"""Support for Nanoleaf Lights.""" import logging from pynanoleaf import Nanoleaf, Unavailable import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_TRANSITION, Light, ) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN import homeassistant.helpers.config_validation as cv from homeassistant.util import color as color_util from homeassistant.util.color import ( color_temperature_mired_to_kelvin as mired_to_kelvin, ) from homeassistant.util.json import load_json, save_json _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = "Nanoleaf" DATA_NANOLEAF = "nanoleaf" CONFIG_FILE = ".nanoleaf.conf" ICON = "mdi:triangle-outline" SUPPORT_NANOLEAF = ( SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_COLOR | SUPPORT_TRANSITION ) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend( { vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, } ) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Nanoleaf light.""" if DATA_NANOLEAF not in hass.data: hass.data[DATA_NANOLEAF] = dict() token = "" if discovery_info is not None: host = discovery_info["host"] name = discovery_info["hostname"] # if device already exists via config, skip discovery setup if host in hass.data[DATA_NANOLEAF]: return _LOGGER.info("Discovered a new Nanoleaf: %s", discovery_info) conf = load_json(hass.config.path(CONFIG_FILE)) if conf.get(host, {}).get("token"): token = conf[host]["token"] else: host = config[CONF_HOST] name = config[CONF_NAME] token = config[CONF_TOKEN] nanoleaf_light = Nanoleaf(host) if not token: token = nanoleaf_light.request_token() if not token: _LOGGER.error( "Could not generate the auth token, did you press " "and hold the power button on %s" "for 5-7 seconds?", name, ) return conf = load_json(hass.config.path(CONFIG_FILE)) conf[host] = {"token": token} save_json(hass.config.path(CONFIG_FILE), conf) nanoleaf_light.token = token try: nanoleaf_light.available except Unavailable: _LOGGER.error("Could not connect to Nanoleaf Light: %s on %s", name, host) return hass.data[DATA_NANOLEAF][host] = nanoleaf_light add_entities([NanoleafLight(nanoleaf_light, name)], True) class NanoleafLight(Light): """Representation of a Nanoleaf Light.""" def __init__(self, light, name): """Initialize an Nanoleaf light.""" self._available = True self._brightness = None self._color_temp = None self._effect = None self._effects_list = None self._light = light self._name = name self._hs_color = None self._state = None @property def available(self): """Return availability.""" return self._available @property def brightness(self): """Return the brightness of the light.""" if self._brightness is not None: return int(self._brightness * 2.55) return None @property def color_temp(self): """Return the current color temperature.""" if self._color_temp is not None: return color_util.color_temperature_kelvin_to_mired(self._color_temp) return None @property def effect(self): """Return the current effect.""" return self._effect @property def effect_list(self): """Return the list of supported effects.""" return self._effects_list @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" return 154 @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" return 833 
@property def name(self): """Return the display name of this light.""" return self._name @property def icon(self): """Return the icon to use in the frontend, if any.""" return ICON @property def is_on(self): """Return true if light is on.""" return self._state @property def hs_color(self): """Return the color in HS.""" return self._hs_color @property def supported_features(self): """Flag supported features.""" return SUPPORT_NANOLEAF def turn_on(self, **kwargs): """Instruct the light to turn on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) hs_color = kwargs.get(ATTR_HS_COLOR) color_temp_mired = kwargs.get(ATTR_COLOR_TEMP) effect = kwargs.get(ATTR_EFFECT) transition = kwargs.get(ATTR_TRANSITION) if hs_color: hue, saturation = hs_color self._light.hue = int(hue) self._light.saturation = int(saturation) if color_temp_mired: self._light.color_temperature = mired_to_kelvin(color_temp_mired) if transition: if brightness: # tune to the required brightness in n seconds self._light.brightness_transition( int(brightness / 2.55), int(transition) ) else: # If brightness is not specified, assume full brightness self._light.brightness_transition(100, int(transition)) else: # If no transition is occurring, turn on the light self._light.on = True if brightness: self._light.brightness = int(brightness / 2.55) if effect: self._light.effect = effect def turn_off(self, **kwargs): """Instruct the light to turn off.""" transition = kwargs.get(ATTR_TRANSITION) if transition: self._light.brightness_transition(0, int(transition)) else: self._light.on = False def update(self): """Fetch new state data for this light.""" try: self._available = self._light.available self._brightness = self._light.brightness self._color_temp = self._light.color_temperature self._effect = self._light.effect self._effects_list = self._light.effects self._hs_color = self._light.hue, self._light.saturation self._state = self._light.on except Unavailable as err: _LOGGER.error("Could not update status for %s (%s)", self.name, err) self._available = False
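# --- Editor's note (not part of the original file) ---------------------------
# Example configuration.yaml entry implied by PLATFORM_SCHEMA above; the host
# and token values are placeholders:
#
#   light:
#     - platform: nanoleaf
#       host: 192.168.1.10
#       token: !secret nanoleaf_token
#       name: Living Room Panels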
{ "content_hash": "6a8d01e0577e6a17bb16e9719c183aac", "timestamp": "", "source": "github", "line_count": 236, "max_line_length": 82, "avg_line_length": 29.470338983050848, "alnum_prop": 0.6018691588785047, "repo_name": "postlund/home-assistant", "id": "4b08d0b9751a40fd814649085954cb4349b3e145", "size": "6955", "binary": false, "copies": "3", "ref": "refs/heads/dev", "path": "homeassistant/components/nanoleaf/light.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Python", "bytes": "20215859" }, { "name": "Shell", "bytes": "6663" } ], "symlink_target": "" }
# Copyright 2014-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from collections import namedtuple
import os
import platform

try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

package_format_mapping = {
    'debian': 'deb',
    'fedora': 'rpm',
    'rhel': 'rpm',
    'ubuntu': 'deb',
}


class JobValidationError(Exception):
    """
    Indicates that the validation of a build job failed.

    This exception is raised by the reconfigure_*_job functions if validation
    fails, e.g. because the requested package is not available in the
    specified index.yaml
    """

    def __init__(self, message):  # noqa: D107
        super(JobValidationError, self).__init__(message)


class PlatformPackageDescriptor(str):
    """
    Represents a package stored in a platform-specific package repository.

    Currently the class is inheriting from str for backwards compatibility.
    You should not rely on this but use the `version` property instead.

    To be replaced with:
    namedtuple('PlatformPackageDescriptor', 'version source_name')
    """

    @staticmethod
    def __new__(cls, version, source_name):
        return str.__new__(cls, version)

    def __init__(self, version, source_name):  # noqa: D107
        self.source_name = source_name

    @property
    def version(self):
        return str(self)


next_scope_id = 1


class Scope(object):

    def __init__(self, scope_name, description):  # noqa: D107
        global next_scope_id
        self.scope_name = scope_name
        self.description = description
        self.scope_id = next_scope_id
        next_scope_id += 1

    def __enter__(self):
        if os.environ.get('TRAVIS') == 'true':
            print('travis_fold:start:scope%d' % self.scope_id)
        print('# BEGIN %s: %s' % (self.scope_name, self.description))

    def __exit__(self, type, value, traceback):
        print('# END %s' % self.scope_name)
        if os.environ.get('TRAVIS') == 'true':
            print('travis_fold:end:scope%d' % self.scope_id)


Target = namedtuple('Target', 'os_name os_code_name arch')


def get_repositories_and_script_generating_key_files(
        config=None, build_file=None):
    # extract the distribution repository urls and keys from the build file
    # and pass them as command line arguments and files
    # so that the job must not parse the build file
    repository_urls = []
    repository_keys = []
    custom_rosdep_urls = []
    if config:
        if 'debian_repositories' in config.prerequisites:
            repository_urls += config.prerequisites['debian_repositories']
        if 'debian_repository_keys' in config.prerequisites:
            repository_keys += config.prerequisites['debian_repository_keys']
        assert len(repository_urls) == len(repository_keys)

    if build_file:
        assert len(build_file.repository_urls) == \
            len(build_file.repository_keys)
        repository_urls += build_file.repository_urls
        repository_keys += build_file.repository_keys

        if hasattr(build_file, 'custom_rosdep_urls'):
            custom_rosdep_urls += build_file.custom_rosdep_urls

    # remove duplicate urls
    unique_repository_urls = []
    unique_repository_keys = []
    for i, url in enumerate(repository_urls):
        if url not in unique_repository_urls:
            unique_repository_urls.append(url)
            unique_repository_keys.append(repository_keys[i])

    repository_args = []
    if unique_repository_urls:
        repository_args.append('--distribution-repository-urls')
        repository_args += [url.replace('$', '\\$')
                            for url in unique_repository_urls]

    script_generating_key_files = []
    if unique_repository_keys:
        repository_args.append('--distribution-repository-key-files')
        script_generating_key_files.append("mkdir -p $WORKSPACE/keys")
        script_generating_key_files.append("rm -fr $WORKSPACE/keys/*")
        for i, repository_key in enumerate(unique_repository_keys):
            repository_args.append('$WORKSPACE/keys/%d.key' % i)
            script_generating_key_files.append(
                'echo "%s" > $WORKSPACE/keys/%d.key' % (repository_key, i))

    if custom_rosdep_urls:
        repository_args.append('--custom-rosdep-urls')
        repository_args += custom_rosdep_urls

    return repository_args, script_generating_key_files


def get_distribution_repository_keys(urls, key_files):
    # ensure that for each key file a url has been passed
    assert \
        len(urls) >= \
        len(key_files), \
        'More distribution repository keys (%d) passed in than urls (%d)' % \
        (len(key_files), len(urls))
    distribution_repositories = []
    for i, url in enumerate(urls):
        key_file = key_files[i] \
            if len(key_files) > i \
            else ''
        distribution_repositories.append((url, key_file))
    print('Using the following distribution repositories:')
    keys = []
    for url, key_file in distribution_repositories:
        print('  %s%s' % (url, ' (%s)' % key_file if key_file else ''))
        with open(key_file, 'r') as h:
            keys.append(h.read().rstrip())
    return keys


def get_binary_package_versions(apt_cache, debian_pkg_names):
    versions = {}
    for debian_pkg_name in debian_pkg_names:
        pkg = apt_cache[debian_pkg_name]
        versions[debian_pkg_name] = max(pkg.versions).version
    return versions


def get_ci_job_name(rosdistro_name, os_name, os_code_name, arch, job_type):
    view_name = get_ci_view_name(rosdistro_name)
    job_name = '%s__%s_%s_%s_%s' % (
        view_name, job_type, os_name, os_code_name, arch)
    return job_name


def get_ci_view_name(rosdistro_name):
    view_name = '%sci' % rosdistro_name[0].upper()
    return view_name


def get_os_package_name_prefix(rosdistro_name):
    return 'ros-%s-' % rosdistro_name


def get_os_package_name(rosdistro_name, ros_package_name):
    return '%s%s' % \
        (get_os_package_name_prefix(rosdistro_name),
         ros_package_name.replace('_', '-'))


def get_devel_view_name(rosdistro_name, source_build_name, pull_request=False):
    name = '%s%s' % (
        rosdistro_name[0].upper(),
        'dev' if not pull_request else 'pr')
    short_source_build_name = get_short_build_name(source_build_name)
    if short_source_build_name:
        name += '_%s' % short_source_build_name
    return name


def get_devel_job_name(rosdistro_name, source_build_name,
                       repo_name, os_name, os_code_name, arch,
                       pull_request=False):
    view_name = get_devel_view_name(
        rosdistro_name, source_build_name, pull_request=pull_request)
    job_name = '%s__%s__%s_%s_%s' % \
        (view_name, repo_name, os_name, os_code_name, arch)
    return job_name


def get_doc_view_name(rosdistro_name, doc_build_name):
    name = '%sdoc' % rosdistro_name[0].upper()
    short_doc_build_name = get_short_build_name(doc_build_name)
    if short_doc_build_name:
        name += '_%s' % short_doc_build_name
    return name


def get_release_job_prefix(rosdistro_name, release_build_name=None):
    prefix = '%srel' % rosdistro_name[0].upper()
    if release_build_name is not None:
        short_release_build_name = get_short_build_name(release_build_name)
        if short_release_build_name:
            prefix += '_%s' % short_release_build_name
    return prefix


def get_release_view_name(
        rosdistro_name, release_build_name, os_name, os_code_name, arch):
    if arch == 'source':
        return get_release_source_view_name(
            rosdistro_name, os_name, os_code_name)
    else:
        return get_release_binary_view_name(
            rosdistro_name, release_build_name, os_name, os_code_name, arch)


def get_release_source_view_prefix(rosdistro_name):
    return '%s%s' % (rosdistro_name[0].upper(), 'src')


def get_release_source_view_name(
        rosdistro_name, os_name, os_code_name):
    return '%s_%s%s' % (
        get_release_source_view_prefix(rosdistro_name),
        get_short_os_name(os_name),
        get_short_os_code_name(os_code_name))


def get_sourcedeb_job_name(rosdistro_name, release_build_name,
                           pkg_name, os_name, os_code_name):
    view_name = get_release_source_view_name(
        rosdistro_name, os_name, os_code_name)
    return '%s__%s__%s_%s__source' % \
        (view_name, pkg_name, os_name, os_code_name)


def get_release_binary_view_prefix(rosdistro_name, release_build_name):
    prefix = '%s%s' % (rosdistro_name[0].upper(), 'bin')
    short_release_build_name = get_short_build_name(release_build_name)
    if short_release_build_name:
        prefix += '_%s' % short_release_build_name
    return prefix


def get_release_binary_view_name(
        rosdistro_name, release_build_name, os_name, os_code_name, arch):
    os_code_name = get_short_os_code_name(os_code_name)
    arch = get_short_arch(arch)
    return '%s_%s%s%s' % (
        get_release_binary_view_prefix(rosdistro_name, release_build_name),
        get_short_os_name(os_name),
        get_short_os_code_name(os_code_name),
        get_short_arch(arch))


def get_binarydeb_job_name(rosdistro_name, release_build_name,
                           pkg_name, os_name, os_code_name, arch):
    view_name = get_release_binary_view_name(
        rosdistro_name, release_build_name, os_name, os_code_name, arch)
    return '%s__%s__%s_%s_%s__binary' % \
        (view_name, pkg_name, os_name, os_code_name, arch)


def get_short_build_name(build_name):
    build_name_mappings = {
        'default': '',
    }
    return build_name_mappings.get(build_name, build_name)


def get_short_os_name(os_name):
    os_name_mappings = {
        'debian': 'd',
        'fedora': 'fc',
        'rhel': 'el',
        'ubuntu': 'u',
    }
    return os_name_mappings.get(os_name, os_name)


def get_short_os_code_name(os_code_name):
    os_code_name_mappings = {
        'artful': 'A',
        'bionic': 'B',
        'bullseye': 'B',
        'buster': 'B',
        'focal': 'F',
        'jammy': 'J',
        'jessie': 'J',
        'saucy': 'S',
        'stretch': 'S',
        'trusty': 'T',
        'utopic': 'U',
        'vivid': 'V',
        'wily': 'W',
        'xenial': 'X',
        'yakkety': 'Y',
        'zesty': 'Z',
    }
    return os_code_name_mappings.get(os_code_name, os_code_name)


def get_short_arch(arch):
    arch_mappings = {
        'aarch64': 'v8',
        'amd64': '64',
        'arm64': 'v8',
        'armhf': 'hf',
        'armhfp': 'hf',
        'i386': '32',
        'x86_64': '64',
    }
    return arch_mappings.get(arch, arch)


def git_github_orgunit(url):
    result = check_https_github_com(url)
    if not result:
        return None
    return result.path[1:result.path.index('/', 1)]


def get_github_project_url(url):
    if not check_https_github_com(url):
        return None
    git_suffix = '.git'
    if not url.endswith(git_suffix):
        return None
    url = url[:-len(git_suffix)] + '/'
    return url


def check_https_github_com(url):
    result = urlparse(url)
    if not result:
        return False
    if result.scheme != 'https':
        return False
    netloc = result.netloc[result.netloc.find('@') + 1:]
    if netloc != 'github.com':
        return False
    return result


def get_user_id():
    uid = os.getuid()
    assert uid != 0, "You can not run this as user 'root'"
    return uid


def find_executable(file_name):
    for path in os.getenv('PATH').split(os.path.pathsep):
        file_path = os.path.join(path, file_name)
        if os.path.isfile(file_path) and os.access(file_path, os.X_OK):
            return file_path
    return None


def get_doc_job_name(rosdistro_name, doc_build_name,
                     repo_name, os_name, os_code_name, arch):
    view_name = get_doc_view_name(
        rosdistro_name, doc_build_name)
    job_name = '%s__%s__%s_%s_%s' % \
        (view_name, repo_name, os_name, os_code_name, arch)
    return job_name


def get_doc_job_url(
        jenkins_url, rosdistro_name, doc_build_name, repository_name,
        os_name, os_code_name, arch):
    return _get_job_url(
        jenkins_url,
        get_doc_view_name(rosdistro_name, doc_build_name),
        get_doc_job_name(
            rosdistro_name, doc_build_name, repository_name, os_name,
            os_code_name, arch)
    )


def get_devel_job_urls(
        jenkins_url, source_build_files, rosdistro_name, repository_name):
    urls = []
    for source_build_name in sorted(source_build_files.keys()):
        build_file = source_build_files[source_build_name]
        for os_name in sorted(build_file.targets.keys()):
            for os_code_name in sorted(build_file.targets[os_name].keys()):
                for arch in build_file.targets[os_name][os_code_name]:
                    job_url = _get_job_url(
                        jenkins_url,
                        get_devel_view_name(rosdistro_name, source_build_name),
                        get_devel_job_name(
                            rosdistro_name, source_build_name,
                            repository_name, os_name, os_code_name, arch)
                    )
                    if job_url not in urls:
                        urls.append(job_url)
    return urls


def get_release_job_urls(
        jenkins_url, release_build_files, rosdistro_name, package_name):
    urls = []

    # first add all source jobs
    for release_build_name in sorted(release_build_files.keys()):
        build_file = release_build_files[release_build_name]
        for os_name in sorted(build_file.targets.keys()):
            for os_code_name in sorted(build_file.targets[os_name].keys()):
                job_url = _get_job_url(
                    jenkins_url,
                    get_release_source_view_name(
                        rosdistro_name, os_name, os_code_name),
                    get_sourcedeb_job_name(
                        rosdistro_name, release_build_name,
                        package_name, os_name, os_code_name)
                )
                if job_url not in urls:
                    urls.append(job_url)

    # then add all binary jobs
    for release_build_name in sorted(release_build_files.keys()):
        build_file = release_build_files[release_build_name]
        for os_name in sorted(build_file.targets.keys()):
            for os_code_name in sorted(build_file.targets[os_name].keys()):
                for arch in build_file.targets[os_name][os_code_name]:
                    job_url = _get_job_url(
                        jenkins_url,
                        get_release_binary_view_name(
                            rosdistro_name, release_build_name,
                            os_name, os_code_name, arch),
                        get_binarydeb_job_name(
                            rosdistro_name, release_build_name,
                            package_name, os_name, os_code_name, arch)
                    )
                    if job_url not in urls:
                        urls.append(job_url)
    return urls


def _get_job_url(jenkins_url, view_name, job_name):
    return '%s/view/%s/job/%s' % (jenkins_url, view_name, job_name)


def write_groovy_script_and_configs(
        filename, content, job_configs, view_configs=None):
    """Write out the groovy script and configs to file.

    This writes the reconfigure script to the file location and places the
    expanded configs in subdirectories 'view_configs' / 'job_configs' that
    the script can then access when run.
    """
    with open(filename, 'w') as h:
        h.write(content)

    if view_configs:
        view_config_dir = os.path.join(
            os.path.dirname(filename), 'view_configs')
        if not os.path.isdir(view_config_dir):
            os.makedirs(view_config_dir)
        for config_name, config_body in view_configs.items():
            config_filename = os.path.join(view_config_dir, config_name)
            with open(config_filename, 'w') as config_fh:
                config_fh.write(config_body)

    job_config_dir = os.path.join(os.path.dirname(filename), 'job_configs')
    if not os.path.isdir(job_config_dir):
        os.makedirs(job_config_dir)
    # prefix each config file with a serial number to maintain order
    format_str = '%0' + str(len(str(len(job_configs)))) + 'd'
    i = 0
    for config_name, config_body in job_configs.items():
        i += 1
        config_filename = os.path.join(
            job_config_dir, format_str % i + ' ' + config_name)
        with open(config_filename, 'w') as config_fh:
            config_fh.write(config_body)


def topological_order_packages(packages):
    """
    Order packages topologically.

    First returning packages which have message generators and then
    the rest based on all direct depends and indirect recursive run_depends.

    :param packages: A dict mapping relative paths to ``Package`` objects
      ``dict``
    :returns: A list of tuples containing the relative path and a ``Package``
      object, ``list``
    """
    from catkin_pkg.topological_order import _PackageDecorator
    from catkin_pkg.topological_order import _sort_decorated_packages

    decorators_by_name = {}
    for path, package in packages.items():
        decorators_by_name[package.name] = _PackageDecorator(package, path)

    # calculate transitive dependencies
    for decorator in decorators_by_name.values():
        decorator.depends_for_topological_order = set([])
        all_depends = \
            decorator.package.build_depends + \
            decorator.package.buildtool_depends + \
            decorator.package.run_depends + decorator.package.test_depends
        # skip external dependencies, meaning names that are not known packages
        unique_depend_names = set([
            d.name for d in all_depends
            if d.name in decorators_by_name.keys() and
            d.evaluated_condition is not False])
        unique_depend_names.update([
            m for d in decorator.package.group_depends
            for m in d.members
            if d.evaluated_condition is not False])
        for name in unique_depend_names:
            if name in decorator.depends_for_topological_order:
                # avoid function call to improve performance
                # check within the loop since the set changes every cycle
                continue
            decorators_by_name[name]._add_recursive_run_depends(
                decorators_by_name, decorator.depends_for_topological_order)

    ordered_pkg_tuples = _sort_decorated_packages(decorators_by_name)

    for pkg_path, pkg in ordered_pkg_tuples:
        if pkg_path is None:
            raise RuntimeError('Circular dependency in: %s' % pkg)
    return ordered_pkg_tuples


def get_node_label(config_job_label, default_label=None):
    if config_job_label is not None:
        return config_job_label
    if default_label is None:
        default_label = get_default_node_label()
    return default_label


def get_default_node_label(additional_label=None):
    label = 'buildagent'
    if additional_label:
        label += ' || ' + additional_label
    return label


def get_system_architecture():
    # this is used to determine the arch for the Docker image for source jobs
    # which don't explicitly specify an architecture
    machine = platform.machine()
    if machine == 'x86_64':
        return 'amd64'
    if machine == 'i386':
        return 'i386'
    if machine == 'aarch64':
        return 'armv8'
    raise RuntimeError('Unable to determine architecture')


def get_packages_in_workspaces(workspace_roots, condition_context=None):
    """
    Return packages found in the passed workspaces.

    :param workspace_roots: A list of absolute paths to workspaces
    :param condition_context: An optional dict containing environment
      variables for the conditional evaluation in the package manifests
    :returns: A list of ``Package`` objects
    """
    from catkin_pkg.packages import find_packages
    pkgs = {}
    for workspace_root in workspace_roots:
        source_space = os.path.join(workspace_root, 'src')
        print("Crawling for packages in workspace '%s'" % source_space)
        ws_pkgs = find_packages(source_space)
        pkgs.update(ws_pkgs)
    if condition_context is not None:
        for pkg in pkgs.values():
            pkg.evaluate_conditions(condition_context)
    return pkgs


def get_xunit_publisher_types_and_patterns(
    ros_version, pytest_junit_compliant
):
    types = []
    if ros_version == 1:
        types.append(('GoogleTestType', 'ws/test_results/**/*.xml'))
    elif ros_version == 2:
        types.append(('CTestType', 'ws/test_results/*/Testing/*/Test.xml'))
        types.append(('GoogleTestType', 'ws/test_results/**/*.gtest.xml'))
        types.append((
            'JUnitType' if pytest_junit_compliant else 'GoogleTestType',
            'ws/test_results/*/pytest.xml'))
        # ament_cmake_pytest doesn't produce a pytest.xml
        types.append((
            'JUnitType' if pytest_junit_compliant else 'GoogleTestType',
            'ws/test_results/**/*.xunit.xml'))
    else:
        assert False, 'Unsupported ROS version: ' + str(ros_version)
    return types


def get_direct_dependencies(
    pkg_name, cached_pkgs, pkg_names,
    include_test_deps=True, include_group_deps=False,
):
    if pkg_name not in cached_pkgs:
        return None
    pkg = cached_pkgs[pkg_name]
    pkg_deps = (pkg.buildtool_depends + pkg.build_depends +
                pkg.buildtool_export_depends + pkg.build_export_depends)
    if include_test_deps:
        pkg_deps += pkg.exec_depends + pkg.test_depends
        # test dependencies are treated similar to build dependencies by bloom
        # so if configured to include test dependencies, we need them here to
        # ensure that all dependencies are available before starting a build
    depends = set([
        d.name for d in pkg_deps
        if d.name in pkg_names and d.evaluated_condition is not False])
    if include_group_deps:
        depends.update(
            m for group_dep in pkg.group_depends
            for m in group_dep.members
            if group_dep.evaluated_condition is not False)
    return depends


def get_downstream_package_names(pkg_names, dependencies):
    downstream_pkg_names = set([])
    for pkg_name, deps in dependencies.items():
        if deps.intersection(pkg_names):
            downstream_pkg_names.add(pkg_name)
    return downstream_pkg_names


def get_package_manifests(dist):
    cached_pkgs = {}
    for pkg_name in dist.release_packages.keys():
        pkg_xml = dist.get_release_package_xml(pkg_name)
        if pkg_xml is not None:
            from catkin_pkg.package import InvalidPackage, parse_package_string
            try:
                pkg_manifest = parse_package_string(pkg_xml)
            except InvalidPackage:
                continue
            cached_pkgs[pkg_name] = pkg_manifest
    return cached_pkgs


def get_implicitly_ignored_package_names(
        cached_pkgs, explicitly_ignored_pkg_names):
    pkg_names = set(explicitly_ignored_pkg_names).union(cached_pkgs.keys())
    # get direct dependencies from distro cache for each package
    direct_dependencies = {}
    for pkg_name in cached_pkgs.keys():
        direct_dependencies[pkg_name] = get_direct_dependencies(
            pkg_name, cached_pkgs, pkg_names) or set([])
    # find recursive downstream deps for all explicitly ignored packages
    ignored_pkg_names = set(explicitly_ignored_pkg_names)
    while True:
        implicitly_ignored_pkg_names = get_downstream_package_names(
            ignored_pkg_names, direct_dependencies)
        if implicitly_ignored_pkg_names - ignored_pkg_names:
            ignored_pkg_names |= implicitly_ignored_pkg_names
            continue
        break
    return ignored_pkg_names.difference(explicitly_ignored_pkg_names)


def filter_blocked_dependent_package_names(cached_pkgs, failed_pkg_names):
    """Return the list of packages that are missing and not blocked.

    Return the list of packages that are missing that are not depending
    on other missing packages.
    """
    # get direct dependencies from distro cache for each package
    direct_dependencies = {}
    for pkg_name in cached_pkgs:
        direct_dependencies[pkg_name] = get_direct_dependencies(
            pkg_name, cached_pkgs, cached_pkgs) or set([])
    # find recursive downstream deps for all failed packages
    all_deps = get_downstream_package_names(
        failed_pkg_names, direct_dependencies)
    while True:
        blocked_pkgs = get_downstream_package_names(
            all_deps, direct_dependencies)
        if blocked_pkgs - all_deps:
            all_deps |= blocked_pkgs
            continue
        break
    return failed_pkg_names.difference(all_deps)


def filter_buildfile_packages_recursively(
        package_names, buildfile, rosdistro_name):
    """Filter packages based on the build file, including recursively blocked packages.

    Filter a list of packages based on a build file's blacklist and whitelist
    including implicit blacklisting of dependent packages for a specific
    rosdistro.
    """
    res = buildfile.filter_packages(package_names)
    cached_pkgs = get_package_manifests(rosdistro_name)
    implicitly_ignored = get_implicitly_ignored_package_names(
        cached_pkgs, buildfile.package_blacklist)
    res -= implicitly_ignored
    res.difference_update(buildfile.package_ignore_list)
    return res


def get_package_condition_context(index, rosdistro_name):
    python_version = index.distributions[rosdistro_name].get('python_version')
    ros_version = {
        'ros1': '1',
        'ros2': '2',
    }.get(index.distributions[rosdistro_name].get('distribution_type'))

    condition_context = {
        'ROS_DISTRO': rosdistro_name,
    }
    if python_version:
        condition_context['ROS_PYTHON_VERSION'] = python_version
    if ros_version:
        condition_context['ROS_VERSION'] = ros_version
    return condition_context
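# --- Editor's usage sketch (not part of the original module) -----------------
# Illustrates the naming helpers above with made-up inputs; the expected
# values follow directly from the mappings defined in this file.
def _naming_demo():
    assert get_os_package_name('melodic', 'tf2_ros') == 'ros-melodic-tf2-ros'
    assert get_short_arch('x86_64') == '64'
    assert get_release_binary_view_name(
        'melodic', 'default', 'ubuntu', 'bionic', 'amd64') == 'Mbin_uB64'
    return True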
{ "content_hash": "b9363e7ee76b7726274b8b60921f30c3", "timestamp": "", "source": "github", "line_count": 749, "max_line_length": 94, "avg_line_length": 35.416555407209614, "alnum_prop": 0.6275492894032495, "repo_name": "ros-infrastructure/ros_buildfarm", "id": "3bf0dc81a279c7afaa6e437956fc8ba94789a9b7", "size": "26527", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ros_buildfarm/common.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "5328" }, { "name": "EmberScript", "bytes": "352484" }, { "name": "Groovy", "bytes": "1561" }, { "name": "JavaScript", "bytes": "13229" }, { "name": "Python", "bytes": "784731" }, { "name": "Shell", "bytes": "10950" } ], "symlink_target": "" }
from google.cloud import aiplatform_v1beta1


async def sample_get_artifact():
    # Create a client
    client = aiplatform_v1beta1.MetadataServiceAsyncClient()

    # Initialize request argument(s)
    request = aiplatform_v1beta1.GetArtifactRequest(
        name="name_value",
    )

    # Make the request
    response = await client.get_artifact(request=request)

    # Handle the response
    print(response)

# [END aiplatform_generated_aiplatform_v1beta1_MetadataService_GetArtifact_async]
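# --- Editor's note (not part of the generated sample) -------------------------
# The coroutine above still needs an event loop to run, e.g.:
#
#   import asyncio
#   asyncio.run(sample_get_artifact())
#
# "name_value" is a placeholder; real artifact names are typically fully
# qualified, e.g.
# projects/{project}/locations/{location}/metadataStores/{store}/artifacts/{artifact}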
{ "content_hash": "3b393b3212767e19c4e044a6046a3b00", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 81, "avg_line_length": 26.210526315789473, "alnum_prop": 0.7309236947791165, "repo_name": "googleapis/python-aiplatform", "id": "d7ac573e0b400fbb88e82ac3b2b677627aa69ffa", "size": "1518", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_metadata_service_get_artifact_async.py", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Dockerfile", "bytes": "2050" }, { "name": "Python", "bytes": "23977004" }, { "name": "Shell", "bytes": "30668" } ], "symlink_target": "" }
from __future__ import print_function
from datetime import datetime
from bs4 import BeautifulSoup
import socket
import socks
import hashlib
import magic
import os
import argparse
import sys
import threading
import logging
from logging import getLogger, StreamHandler, DEBUG


def fetch_soup(name, url):
    request = urllib2.Request(url)
    request.add_header('User-Agent', 'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)')
    try:
        html = urllib2.urlopen(request, timeout=10)
    except urllib2.URLError as e:
        logger.error('{0}: {1}, fetching from {2}'.format(name, e.reason, url))
        return
    except Exception as e:
        logger.error('{0}: Failed to fetch from {1}'.format(name, url))
        return
    soup = BeautifulSoup(html, 'html.parser')
    return soup


def fetch_file(name, url, dest_path):
    try:
        file_binary = urllib2.urlopen(url, timeout=10).read()
    except urllib2.URLError as e:
        logger.error('{0}: {1}, fetching from {2}'.format(name, e.reason, url))
        return
    except Exception as e:
        logger.error('{0}: Failed to fetch from {1}'.format(name, url))
        return
    filetype = magic.from_buffer(file_binary, mime=True).decode(sys.stdin.encoding).split(' ')[0]
    file_md5 = hashlib.md5(file_binary).hexdigest()
    dest_filetype_path = os.path.join(dest_path, filetype)
    dest_file_path = os.path.join(dest_filetype_path, str(file_md5))
    if not os.path.exists(dest_filetype_path):
        os.makedirs(dest_filetype_path)
    if not os.path.exists(dest_file_path):
        with open(dest_file_path, 'wb') as f:
            f.write(file_binary)
        logger.debug('{0}: Saved file type {1} with md5: {2}'.format(name, filetype, file_md5))


def malwaredl(soup, dest_path):
    name = sys._getframe().f_code.co_name
    logger.debug('{0}: Fetching from Malware Domain List'.format(name))
    description_soup = soup('description')[1:]
    logger.debug('{0}: Found {1} urls'.format(name, len(description_soup)))
    for xml in description_soup:
        url = 'http://' + xml.string.replace('&amp;', '&').split(',')[0][6:]
        fetch_file(name, url, dest_path)


def vxvault(soup, dest_path):
    name = sys._getframe().f_code.co_name
    logger.debug('{0}: Fetching from VXvault'.format(name))
    url_list = soup('pre')[0].string.replace('&amp;', '&').split('\r\n')[4:-1]
    logger.debug('{0}: Found {1} urls'.format(name, len(url_list)))
    for url in url_list:
        fetch_file(name, url, dest_path)


def malc0de(soup, dest_path):
    name = sys._getframe().f_code.co_name
    logger.debug('{0}: Fetching from Malc0de'.format(name))
    description_soup = soup('description')[1:]
    logger.debug('{0}: Found {1} urls'.format(name, len(description_soup)))
    for xml in description_soup:
        host = xml.string.replace('&amp;', '&').split(',')[0][5:]
        if host is not None:
            url = 'http://' + host
            fetch_file(name, url, dest_path)
        else:
            ip_address = xml.text.split(',')[1][13:]
            fetch_file(name, 'http://' + ip_address, dest_path)


if __name__ == '__main__':
    print('  ___  _                        _    _       _       _               ')
    print(' / _ \| |                      | |  | |     | |     | |              ')
    print('/ /_\ \ |__  _   _ ___ ___     | |  | | __ _| |_ ___| |__   ___ _ __ ')
    print('|  _  | `_ \| | | / __/ __|    | |/\| |/ _` | __/ __| `_ \ / _ \ `__|')
    print('| | | | |_) | |_| \__ \__ \    \  /\  / (_| | || (__| | | |  __/ |   ')
    print('\_| |_/_.__/ \__, |___/___/     \/  \/ \__,_|\__\___|_| |_|\___|_|   ')
    print('              __/ |                                                  ')
    print('             |___/                                         v 0.2     ')
    print('')

    parser = argparse.ArgumentParser(description='Abyss Watcher - Malware Downloader')
    parser.add_argument('--path', '-p', type=str, help='destination path')
    parser.add_argument('--torify', '-t', action='store_true', help='torify')
    args = parser.parse_args()

    today = datetime.now().strftime('%Y.%m.%d')
    logger = getLogger(today)
    handler = StreamHandler()
    handler.setLevel(DEBUG)
    logger.setLevel(DEBUG)
    logger.addHandler(handler)

    if args.path:
        dest_path = args.path
    else:
        dest_path = today

    logger.debug('{0}: {1}'.format(__name__, today))

    if args.torify:
        socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", 9050)
        socket.socket = socks.socksocket

    # urllib2 is imported after the optional SOCKS monkey-patch so that its
    # connections pick up the proxied socket; importing it unconditionally
    # here keeps the non-Tor code path working as well
    import urllib2

    if args.torify:
        logger.debug('{0}: {1}'.format(__name__, urllib2.urlopen("https://api.ipify.org?format=json").read()))

    try:
        t1 = threading.Thread(target=malwaredl, args=(fetch_soup(__name__, 'http://www.malwaredomainlist.com/hostslist/mdl.xml'), dest_path))
        t2 = threading.Thread(target=vxvault, args=(fetch_soup(__name__, 'http://vxvault.net/URL_List.php'), dest_path))
        t3 = threading.Thread(target=malc0de, args=(fetch_soup(__name__, 'http://malc0de.com/rss'), dest_path))
        t1.start()
        t2.start()
        t3.start()
        t1.join()
        t2.join()
        t3.join()
    except Exception, e:
        logger.error('{0}: {1}'.format(__name__, e))
        pass
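# --- Editor's usage sketch (not part of the original script) -----------------
# Example invocations implied by the argparse setup above:
#
#   python abyss.py                   # save samples under ./YYYY.MM.DD/<type>/
#   python abyss.py -p ./samples -t   # custom path, routed through Tor on
#                                     # 127.0.0.1:9050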
{ "content_hash": "b875201be95c3f8d994290ea6e59dd5a", "timestamp": "", "source": "github", "line_count": 147, "max_line_length": 145, "avg_line_length": 36.034013605442176, "alnum_prop": 0.5514442137058713, "repo_name": "ntddk/Abyss-Watcher", "id": "9b78368393b0cd5aad902773f888b25dac1bb575", "size": "5336", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "abyss.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "5336" } ], "symlink_target": "" }
from swgpy.object import *


def create(kernel):
    result = Tangible()

    result.template = "object/tangible/component/item/quest_item/shared_current_motivator.iff"
    result.attribute_template_id = -1
    result.stfName("craft_item_ingredients_n","current_motivator")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
{ "content_hash": "a2c44efe21eb9b2d17945523970f3323", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 91, "avg_line_length": 26.46153846153846, "alnum_prop": 0.7180232558139535, "repo_name": "obi-two/Rebelion", "id": "2046762b2c1a99a7a51b473c0a57e95246e055cd", "size": "489", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "data/scripts/templates/object/tangible/component/item/quest_item/shared_current_motivator.py", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "11818" }, { "name": "C", "bytes": "7699" }, { "name": "C++", "bytes": "2293610" }, { "name": "CMake", "bytes": "39727" }, { "name": "PLSQL", "bytes": "42065" }, { "name": "Python", "bytes": "7499185" }, { "name": "SQLPL", "bytes": "41864" } ], "symlink_target": "" }