hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2bf88d6e618bbe5b3386a7b4eba80d58a45533e2 | 11,474 | py | Python | pdfreader/types/objects.py | tmcclintock/pdfreader | 2933e7b92b5ab7fd60e1a4a27c251de387ad098a | [
"MIT"
] | null | null | null | pdfreader/types/objects.py | tmcclintock/pdfreader | 2933e7b92b5ab7fd60e1a4a27c251de387ad098a | [
"MIT"
] | null | null | null | pdfreader/types/objects.py | tmcclintock/pdfreader | 2933e7b92b5ab7fd60e1a4a27c251de387ad098a | [
"MIT"
] | null | null | null | from ..utils import cached_property
from ..pillow import PILImageMixin
from .native import Stream, Dictionary, Array, Name
class StartXRef(object):
    """Pseudo object for the ``startxref <offset>`` marker.

    May occur between indirect objects when the file contains
    incremental updates.
    """

    def __init__(self, offset):
        self.offset = offset

    def __repr__(self):
        return "<StartXRef: %s>" % (self.offset,)

    def __eq__(self, other):
        return other.offset == self.offset
class Trailer(object):
    """Pseudo object for a ``trailer ... endtrailer`` section.

    May occur between indirect objects when the file contains
    incremental updates.
    """

    def __init__(self, params):
        self.params = params

    def __repr__(self):
        return "<Trailer: %s>" % (self.params,)

    def __eq__(self, other):
        return other.params == self.params
class StreamBasedObject(Stream):
    """ Stream-based object.
        Automatically resolves indirect references on attributes access """

    def __init__(self, doc, stream):
        """
        :param doc: owning document; used to resolve indirect references
        :param stream: source `Stream` providing the dictionary and raw data
        """
        super(StreamBasedObject, self).__init__(stream.dictionary, stream.stream)
        self.doc = doc
        self._cache = dict()  # memoized resolved attribute values

    @classmethod
    def from_stream(cls, other):
        """Build an instance from another stream object, keeping its `doc`."""
        obj = super(StreamBasedObject, StreamBasedObject).from_stream(other)
        obj.doc = other.doc
        # BUG fix: the constructed object was never returned, so callers
        # always received None.
        return obj

    def __getattr__(self, item):
        # Resolve the attribute lazily through the document and memoize it.
        if item in self._cache:
            return self._cache[item]
        obj = super(StreamBasedObject, self).__getattr__(item)
        obj = self.doc.build(obj, lazy=True)
        self._cache[item] = obj
        return self._cache[item]
class ArrayBasedObject(Array):
    """ Array-based object.
        Automatically resolves indirect references on items access """

    def __init__(self, doc, lst):
        self.doc = doc
        # ToDo: make this lazier — build entries only on access attempt
        #       (e.g. a.index, a[i], a[1:2] etc.)
        built = [doc.build(entry, lazy=True) for entry in lst]
        super(ArrayBasedObject, self).__init__(built)
class DictBasedObject(Dictionary):
    """ Dictionary-based object.
        Automatically resolves indirect references on attributes/items access """

    def __init__(self, doc, *args, **kwargs):
        """
        :param doc: owning document; used to resolve indirect references lazily
        """
        super(DictBasedObject, self).__init__(*args, **kwargs)
        self.doc = doc
        self._cache = dict()  # memoized resolved values, keyed like the dict

    def __getattr__(self, item):
        # Attribute access mirrors item access; missing keys yield None.
        return self.get(item)

    def __getitem__(self, item):
        if item in self._cache:
            return self._cache[item]
        obj = super(DictBasedObject, self).__getitem__(item)
        obj = self.doc.build(obj, lazy=True)
        self._cache[item] = obj
        return self._cache[item]

    def __delitem__(self, item):
        """ Delete self[key]. """
        # BUG fix: the previous implementation only dropped the cache entry
        # (raising KeyError for keys never accessed) and left the underlying
        # dictionary untouched.  Remove from both.
        super(DictBasedObject, self).__delitem__(item)
        self._cache.pop(item, None)

    def get(self, item, default=None):
        """Return self[item] (building it if needed) or *default*."""
        try:
            val = self[item]
        except KeyError:
            val = default
        return val

    # override defaults to build Dictionary values before returning

    def keys(self):
        return [k for k in super(DictBasedObject, self).keys()]

    def values(self):
        return [self[k] for k in super(DictBasedObject, self).keys()]

    def items(self):
        return [(k, self[k]) for k in super(DictBasedObject, self).keys()]

    def pop(self, i, **kwargs):
        if i in self:
            res = self[i]
            _ = super(DictBasedObject, self).pop(i)
            # BUG fix: evict the cached value so later lookups do not
            # resurrect a popped key from the cache.
            self._cache.pop(i, None)
        else:
            res = super(DictBasedObject, self).pop(i, **kwargs)
        return res

    def popitem(self):
        if not self:
            raise KeyError()
        k = next(iter(super(DictBasedObject, self).keys()))
        return k, self.pop(k)
def obj_factory(doc, obj):
    """Wrap a raw parsed object into its typed counterpart.

    Streams are dispatched on /Type (and /Subtype for XObjects),
    dictionaries on /Type, lists become ArrayBasedObject; anything else
    is returned unchanged.
    """
    if isinstance(obj, Stream):
        if obj.Type == 'XObject':
            klass = XOBJECTS.get(obj.Subtype, XObject)
        else:
            klass = STREAM_BASED_OBJECTS.get(obj.Type, StreamBasedObject)
    elif isinstance(obj, Dictionary):
        klass = DICT_OBJECTS.get(obj.get('Type'), DictBasedObject)
    elif isinstance(obj, list):
        klass = ArrayBasedObject
    else:
        return obj
    return klass(doc, obj)
class ObjectStream(StreamBasedObject):
    """ Stream-based object. (Type = ObjStm)
    """
    pass
class Catalog(DictBasedObject):
    """
    Dictionary based object. (Type = Catalog) — the document root.
    See PDF 1.7 specification `sec. 7.7.2 - DocumentCatalog <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=71>`_
    """
    pass
class PageTreeNode(DictBasedObject):
    """
    Dictionary based object. (Type = Pages)
    See PDF 1.7 specification `sec. 7.7.3.2 - Page Tree Nodes <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=76>`_
    """

    def pages(self, node=None):
        """
        Walk the subtree rooted at *node* (default: self), yielding leaf pages.
        :return: :class:`~pdfreader.types.objects.Page` generator.
        """
        node = self if node is None else node
        for kid in node.Kids:
            if isinstance(kid, Page):
                yield kid
            else:
                # intermediate Pages node: recurse into it
                for leaf in self.pages(kid):
                    yield leaf
class Page(DictBasedObject):
    """
    Dictionary based Page object. (Type = Page) — a single page leaf.
    See PDF 1.7 specification `sec. 7.7.3.3 - Page Objects <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=77>`_
    """
class XObject(StreamBasedObject):
    """
    Stream based XObject object. (Type = XObject) — base for Image/Form/Group.
    See PDF 1.7 specification `sec. 8.8 - External Objects <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=201>`_
    """
class CMap(StreamBasedObject):
    """ Stream-based object. (Type = CMap)
    """
class Metadata(StreamBasedObject):
    """ Stream-based object. (Type = Metadata, Subtype = XML)
    """
class Image(PILImageMixin, XObject):
    """
    Stream based XObject object. (Type = XObject, Subtype = Image)
    PILImageMixin adds Pillow-based image conversion helpers.
    See PDF 1.7 specification `sec. 8.9 - Images <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=203>`_
    """
class Form(XObject):
    """
    Stream based XObject object. (Type = XObject, Subtype = Form)
    See PDF 1.7 specification `sec. 8.10 - Form XObjects <https://www.adobe.com/content/dam/acom/en/devnet/pdf/pdfs/PDF32000_2008.pdf#page=217>`_
    """
    # Expose the decoded (filter-applied) stream bytes as the form content.
    stream_content = XObject.filtered
class Group(XObject):
    """ Stream-based object. (Type = XObject, Subtype = Group)
    """
# ---------------------------------------------------------------------------
# Thin DictBasedObject subclasses: one per PDF dictionary /Type name.
# They add no behavior of their own; the DICT_OBJECTS registry maps each
# /Type value to the matching class so obj_factory can pick it.
# ---------------------------------------------------------------------------


class OCG(DictBasedObject):
    """ Type = OCG
        Optional Content Group
    """


class OCMD(DictBasedObject):
    """ Type = OCMD
        Optional Content Membership Dictionary
    """


class Font(DictBasedObject):
    """ Type = Font
    """


class Encoding(DictBasedObject):
    """ Type = Encoding
    """


class CIDFont(DictBasedObject):
    """ Type = CIDFont
    """


class FontDescriptior(DictBasedObject):
    """ Type = FontDescriptior

        NOTE(review): keeps the original 'Descriptior' spelling — the
        DICT_OBJECTS registry depends on this exact class name.
    """


class Halftone(DictBasedObject):
    """ Type = Halftone
    """


class Outlines(DictBasedObject):
    """ Type = Outlines
    """


class Collection(DictBasedObject):
    """ Type = Collection
    """


class CollectionField(DictBasedObject):
    """ Type = CollectionField
    """


class CollectionSort(DictBasedObject):
    """ Type = CollectionSort
    """


class CollectionSchema(DictBasedObject):
    """ Type = CollectionSchema
    """


class PageLabel(DictBasedObject):
    """ Type = PageLabel
    """


class Bread(DictBasedObject):
    """ Type = Bread
    """


class Thread(DictBasedObject):
    """ Type = Thread
    """


class Trans(DictBasedObject):
    """ Type = Trans
    """


class NavNode(DictBasedObject):
    """ Type = NavNode
    """


class Annot(DictBasedObject):
    """ Type = Annot
    """


class Border(DictBasedObject):
    """ Type = Border
    """


class Action(DictBasedObject):
    """ Type = Action
    """


class Sig(DictBasedObject):
    """ Type = Sig
    """


class SigRef(DictBasedObject):
    """ Type = SigRef
    """


class TransformParams(DictBasedObject):
    """ Type = TransformParams
    """


class Requirement(DictBasedObject):
    """ Type = Requirement
    """


class ReqHandler(DictBasedObject):
    """ Type = ReqHandler
    """


class Rendition(DictBasedObject):
    """ Type = Rendition
    """


class MediaCriteria(DictBasedObject):
    """ Type = MediaCriteria
    """


class MinBitDepth(DictBasedObject):
    """ Type = MinBitDepth
    """


class MinScreenSize(DictBasedObject):
    """ Type = MinScreenSize
    """


class MediaClip(DictBasedObject):
    """ Type = MediaClip
    """


class MediaPermissions(DictBasedObject):
    """ Type = MediaPermissions
    """


class MediaDuration(DictBasedObject):
    """ Type = MediaDuration
    """


class MediaScreenParams(DictBasedObject):
    """ Type = MediaScreenParams
    """


class FWParams(DictBasedObject):
    """ Type = FWParams
    """


class MediaOffset(DictBasedObject):
    """ Type = MediaOffset
    """


class Timespan(DictBasedObject):
    """ Type = Timespan
    """


class MediaPlayers(DictBasedObject):
    """ Type = MediaPlayers
    """


class MediaPlayerInfo(DictBasedObject):
    """ Type = MediaPlayerInfo
    """


class SoftwareIdentifier(DictBasedObject):
    """ Type = SoftwareIdentifier
    """


class Sound(DictBasedObject):
    """ Type = Sound
    """


class SlideShow(DictBasedObject):
    """ Type = SlideShow
    """


class LastModified(DictBasedObject):
    """ Type = LastModified
    """


class StructTreeRoot(DictBasedObject):
    """ Type = StructTreeRoot
    """


class StructElem(DictBasedObject):
    """ Type = StructElem
    """


class MCR(DictBasedObject):
    """ Type = MCR
        Marked Content Reference
    """


class OBJR(DictBasedObject):
    """ Type = OBJR
        Object Reference
    """
# Registry: stream /Type name -> wrapper class (fallback: StreamBasedObject).
STREAM_BASED_OBJECTS = {
    'ObjectStream': ObjectStream,
    'XObject': XObject,
    'CMap': CMap,
    'Metadata': Metadata
}

# Registry: XObject /Subtype name -> wrapper class (fallback: XObject).
XOBJECTS = {'Image': Image,
            'Form': Form,
            'Group': Group}

# Registry: dictionary /Type name -> wrapper class (fallback: DictBasedObject).
DICT_OBJECTS = {
    'Catalog': Catalog,
    'Pages': PageTreeNode,
    'Page': Page,
    'OCG': OCG,
    'OCMD': OCMD,
    'Font': Font,
    'Encoding': Encoding,
    'CIDFont': CIDFont,
    'FontDescriptior': FontDescriptior,
    'Halftone': Halftone,
    'Outlines': Outlines,
    'Collection': Collection,
    'CollectionField': CollectionField,
    'CollectionSort': CollectionSort,
    'CollectionSchema': CollectionSchema,
    'PageLabel': PageLabel,
    'Bread': Bread,
    'Thread': Thread,
    'Trans': Trans,
    'NavNode': NavNode,
    'Annot': Annot,
    'Border': Border,
    'Action': Action,
    'Sig': Sig,
    'SigRef': SigRef,
    'TransformParams': TransformParams,
    'Requirement': Requirement,
    'ReqHandler': ReqHandler,
    'Rendition': Rendition,
    'MediaCriteria': MediaCriteria,
    'MinBitDepth': MinBitDepth,
    'MinScreenSize': MinScreenSize,
    'MediaClip': MediaClip,
    'MediaPermissions': MediaPermissions,
    'MediaDuration': MediaDuration,
    'MediaScreenParams': MediaScreenParams,
    'FWParams': FWParams,
    'MediaOffset': MediaOffset,
    'Timespan': Timespan,
    'MediaPlayers': MediaPlayers,
    'MediaPlayerInfo': MediaPlayerInfo,
    'SoftwareIdentifier': SoftwareIdentifier,
    'Sound': Sound,
    'SlideShow': SlideShow,
    'LastModified': LastModified,
    'StructTreeRoot': StructTreeRoot,
    'StructElem': StructElem,
    'MCR': MCR,
    'OBJR': OBJR
}
| 21.855238 | 153 | 0.621666 | 9,056 | 0.789263 | 408 | 0.035559 | 150 | 0.013073 | 0 | 0 | 4,456 | 0.388356 |
2bf8bddedcd983aec3c3cc3cbf6f83ee94dbe06a | 4,402 | py | Python | portfolio2/tinydepparser/myparserutils.py | leonwetzel/Natural-Language-Processing | 5223faf13cf696e9094e3ad939b7fa4ea813c004 | [
"MIT"
] | null | null | null | portfolio2/tinydepparser/myparserutils.py | leonwetzel/Natural-Language-Processing | 5223faf13cf696e9094e3ad939b7fa4ea813c004 | [
"MIT"
] | null | null | null | portfolio2/tinydepparser/myparserutils.py | leonwetzel/Natural-Language-Processing | 5223faf13cf696e9094e3ad939b7fa4ea813c004 | [
"MIT"
] | null | null | null | """
An implementation of a greedy transition-based dependency parser (unlabeled parsing only).
Released under BSD license.
Code is an adapted version of Matthew Honnibal's parser:
https://explosion.ai/blog/parsing-english-in-python
-- change: move core logic to separate myparserutils file
modified by bplank, 03/2017
"""
#### Helper classes - do not modify ####
import os
from collections import defaultdict
import pickle
## Global
# Transition codes for the greedy transition-based parser:
# SHIFT pushes the next word, RIGHT/LEFT attach right/left arcs.
SHIFT = 0; RIGHT = 1; LEFT = 2
MOVES = (SHIFT, RIGHT, LEFT)
class DefaultList(list):
    """A list that falls back to a default value for out-of-bounds indexes."""

    def __init__(self, default=None):
        super(DefaultList, self).__init__()
        self.default = default

    def __getitem__(self, index):
        try:
            return super(DefaultList, self).__getitem__(index)
        except IndexError:
            return self.default
class Parse(object):
    """
    Current arc set A of a dependency parse.

    lefts: left-branching edges per head
    rights: right-branching edges per head
    """

    def __init__(self, n):
        self.n = n
        self.heads = [None] * (n - 1)
        self.labels = [None] * (n - 1)
        self.lefts = []
        self.rights = []
        for _ in range(n + 1):
            self.lefts.append(DefaultList(0))
            self.rights.append(DefaultList(0))

    def add(self, head, child, label=None):
        """Record arc head -> child, filing it as a left or right edge."""
        self.heads[child] = head
        self.labels[child] = label
        side = self.lefts if child < head else self.rights
        side[head].append(child)
#### End helper classes ####
class Perceptron(object):
    """
    Averaged perceptron: learns one weight vector per feature, keyed by class.
    """
    def __init__(self, classes=None):
        self.classes = classes
        # Each feature gets its own weight vector, so weights is a dict-of-dicts
        self.weights = {}
        # The accumulated values, for the averaging. These will be keyed by
        # feature/clas tuples
        self._totals = defaultdict(int)
        # The last time the feature was changed, for the averaging. Also
        # keyed by feature/clas tuples
        # (tstamps is short for timestamps)
        self._tstamps = defaultdict(int)
        # Number of instances seen
        self.i = 0

    def predict(self, features):
        '''Dot-product the features and current weights and return the best class.'''
        scores = self.score(features)
        # Do a secondary alphabetic sort, for stability
        return max(self.classes, key=lambda clas: (scores[clas], clas))

    def score(self, features):
        r"""Return {class: features \cdot weights[class]} for all classes."""
        all_weights = self.weights
        scores = dict((clas, 0) for clas in self.classes)
        for feat, value in features:
            if value == 0:
                continue
            if feat not in all_weights:
                continue  # unseen feature contributes nothing
            weights = all_weights[feat]
            for clas, weight in list(weights.items()):
                scores[clas] += value * weight
        return scores

    def update(self, truth, guess, features):
        """Perceptron update: reward *truth*, penalize *guess* (no-op if equal)."""
        def upd_feat(c, f, w, v):
            # Catch up the averaging total for (f, c) before changing weight.
            param = (f, c)
            self._totals[param] += (self.i - self._tstamps[param]) * w
            self._tstamps[param] = self.i
            self.weights[f][c] = w + v

        self.i += 1
        if truth == guess:
            return None
        for f, val in features:
            weights = self.weights.setdefault(f, {})
            upd_feat(truth, f, weights.get(truth, 0.0), 1.0)
            upd_feat(guess, f, weights.get(guess, 0.0), -1.0)

    def average_weights(self):
        """Replace each weight with its time-average over all updates seen."""
        for feat, weights in list(self.weights.items()):
            new_feat_weights = {}
            for clas, weight in list(weights.items()):
                param = (feat, clas)
                total = self._totals[param]
                total += (self.i - self._tstamps[param]) * weight
                averaged = round(total / float(self.i), 3)
                if averaged:
                    new_feat_weights[clas] = averaged
            self.weights[feat] = new_feat_weights

    def save(self, path):
        """Pickle the weight table to *path*."""
        print("Saving model to %s" % path)
        # BUG fix: use a context manager so the file handle is closed.
        with open(path, 'wb') as fh:
            pickle.dump(self.weights, fh)

    def load(self, path):
        """Load a pickled weight table from *path*."""
        # BUG fix: close the file handle (also fixes the py3.8 warning noted
        # in the original).
        with open(path, 'rb') as fh:
            self.weights = pickle.load(fh)
| 30.358621 | 97 | 0.581554 | 3,859 | 0.876647 | 0 | 0 | 0 | 0 | 0 | 0 | 1,282 | 0.291231 |
2bf8e9a59c36ff11a1fb86c0043541d05023080b | 4,886 | py | Python | senlin-7.0.0/senlin/tests/unit/api/middleware/test_context.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-18T02:56:50.000Z | 2022-03-01T15:28:02.000Z | senlin-7.0.0/senlin/tests/unit/api/middleware/test_context.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | senlin-7.0.0/senlin/tests/unit/api/middleware/test_context.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-19T02:35:57.000Z | 2021-09-28T09:01:42.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo_config import cfg
from oslo_config import fixture
from oslo_middleware import request_id
from oslo_policy import opts as policy_opts
import webob
from senlin.api.common import version_request as vr
from senlin.api.middleware import context
from senlin.common import exception
from senlin.tests.unit.common import base
policy_path = os.path.dirname(os.path.realpath(__file__)) + "/policy/"
class RequestContextMiddlewareTest(base.SenlinTestCase):
scenarios = [(
'empty_headers',
dict(
environ=None,
headers={},
expected_exception=None,
context_dict={
'auth_token': None,
'auth_token_info': None,
'auth_url': '',
'is_admin': False,
'password': None,
'roles': [],
'show_deleted': False,
'project': None,
'user': None,
'user_name': None
})
), (
'token_creds',
dict(
environ={'keystone.token_info': {'info': 123}},
headers={
'X-User-Id': '7a87ff18-31c6-45ce-a186-ec7987f488c3',
'X-Auth-Token': 'atoken2',
'X-Project-Name': 'my_project2',
'X-Project-Id': 'bb9108c8-62d0-4d92-898c-d644a6af20e9',
'X-Auth-Url': 'http://192.0.2.1:5000/v1',
'X-Roles': 'role1,role2,role3',
},
expected_exception=None,
context_dict={
'auth_token': 'atoken2',
'auth_token_info': {'info': 123},
'auth_url': 'http://192.0.2.1:5000/v1',
'is_admin': False,
'password': None,
'roles': ['role1', 'role2', 'role3'],
'show_deleted': False,
'project': 'bb9108c8-62d0-4d92-898c-d644a6af20e9',
'user': '7a87ff18-31c6-45ce-a186-ec7987f488c3',
'user_name': None
})
), (
'malformed_roles',
dict(
environ=None,
headers={
'X-Roles': [],
},
expected_exception=exception.NotAuthenticated)
)]
def setUp(self):
super(RequestContextMiddlewareTest, self).setUp()
self.fixture = self.useFixture(fixture.Config())
self.fixture.conf(args=['--config-dir', policy_path])
policy_opts.set_defaults(cfg.CONF)
cfg.CONF.set_override('policy_file', 'check_admin.json',
group='oslo_policy')
def test_context_middleware(self):
avr = vr.APIVersionRequest('1.0')
middleware = context.ContextMiddleware(None)
request = webob.Request.blank('/clusters', headers=self.headers,
environ=self.environ)
request.version_request = avr
if self.expected_exception:
self.assertRaises(
self.expected_exception, middleware.process_request, request)
else:
self.assertIsNone(middleware.process_request(request))
ctx = request.context.to_dict()
for k, v in self.context_dict.items():
self.assertEqual(v, ctx[k], 'Key %s values do not match' % k)
self.assertIsNotNone(ctx.get('request_id'))
def test_context_middleware_with_requestid(self):
avr = vr.APIVersionRequest('1.0')
middleware = context.ContextMiddleware(None)
request = webob.Request.blank('/clusters', headers=self.headers,
environ=self.environ)
req_id = 'req-5a63f0d7-1b69-447b-b621-4ea87cc7186d'
request.environ[request_id.ENV_REQUEST_ID] = req_id
request.version_request = avr
if self.expected_exception:
self.assertRaises(
self.expected_exception, middleware.process_request, request)
else:
self.assertIsNone(middleware.process_request(request))
ctx = request.context.to_dict()
for k, v in self.context_dict.items():
self.assertEqual(v, ctx[k], 'Key %s values do not match' % k)
self.assertEqual(
ctx.get('request_id'), req_id,
'Key request_id values do not match')
| 38.472441 | 77 | 0.579411 | 3,922 | 0.802702 | 0 | 0 | 0 | 0 | 0 | 0 | 1,447 | 0.296152 |
2bf97b8944bf5eb1ab4228528229eabb845da352 | 1,970 | py | Python | queue_from_track_id.py | codedwrench/sonosopencontroller | 56f6d99958c5e73691f2f39de490b314724db6f5 | [
"MIT"
] | 1 | 2017-04-09T08:24:05.000Z | 2017-04-09T08:24:05.000Z | queue_from_track_id.py | codedwrench/sonosopencontroller | 56f6d99958c5e73691f2f39de490b314724db6f5 | [
"MIT"
] | null | null | null | queue_from_track_id.py | codedwrench/sonosopencontroller | 56f6d99958c5e73691f2f39de490b314724db6f5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from soco import SoCo
from soco.data_structures import DidlItem, DidlResource
from soco.music_services import MusicService
from soco.compat import quote_url
device = SoCo("192.168.1.80") # <------- Your IP here
service = MusicService("Deezer") # <------ Your Music Service here
# Resolve the service item id of the chosen radio/track for queueing.
track_id = service.get_metadata(item_id='radio-37635')['mediaMetadata']['id']
def add_from_service(item_id, service, device, is_track=True):
    """Queue a music-service track (or album/container) on a Sonos device.

    The DIDL item id is the url-escaped service item id behind an 8-hex-digit
    prefix.  The prefix content does not seem to matter much (first digit 0
    or 1), and placeholder title/parent_id values suffice because the player
    itself resolves metadata via get_metadata — see the flow charts at
    http://musicpartners.sonos.com/node/421 and
    http://musicpartners.sonos.com/node/422.
    """
    # quote_url breaks on unicode under early Python 2.7, so encode first.
    escaped_id = quote_url(item_id.encode('utf-8'))
    didl_item_id = "0fffffff{0}".format(escaped_id)
    if is_track:
        uri = service.sonos_uri_from_id(escaped_id)
    else:
        # Containers (albums etc.) use the cpcontainer scheme.
        uri = 'x-rincon-cpcontainer:' + didl_item_id
    resources = [DidlResource(uri=uri, protocol_info="DUMMY")]
    item = DidlItem(title="DUMMY",
                    # This is ignored. Sonos gets the title from the item_id
                    parent_id="DUMMY",  # Ditto
                    item_id=didl_item_id,
                    desc=service.desc,
                    resources=resources)
    device.add_to_queue(item)
# Queue the resolved track on the configured Sonos device.
add_from_service(track_id, service, device, True)
| 34.561404 | 77 | 0.688325 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,070 | 0.543147 |
2bf99724df5b4e9cffb019d5c688ef19980283e0 | 4,545 | py | Python | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_momentEstimator/updateCombinedPowerSums.py | HubertBalcerzak/Kratos | c15689d53f06dabb36dc44c13eeac73d3e183916 | [
"BSD-4-Clause"
] | null | null | null | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_momentEstimator/updateCombinedPowerSums.py | HubertBalcerzak/Kratos | c15689d53f06dabb36dc44c13eeac73d3e183916 | [
"BSD-4-Clause"
] | null | null | null | applications/MultilevelMonteCarloApplication/external_libraries/XMC/xmc/methodDefs_momentEstimator/updateCombinedPowerSums.py | HubertBalcerzak/Kratos | c15689d53f06dabb36dc44c13eeac73d3e183916 | [
"BSD-4-Clause"
] | null | null | null | # Import PyCOMPSs
# from exaqute.ExaquteTaskPyCOMPSs import * # to execute with runcompss
# from exaqute.ExaquteTaskHyperLoom import * # to execute with the IT4 scheduler
from exaqute.ExaquteTaskLocal import * # to execute with python3
def updatePowerSumsOrder1Dimension0():
    """Placeholder — no order-1 update is implemented here."""
    pass
# Task wrapper running updatePowerSumsOrder1Dimension0 through the ExaQute
# scheduler (samples passed as a depth-4 collection).
# NOTE(review): the wrapped function takes no parameters, so this call would
# raise TypeError if executed — confirm the intended signature.
@ExaquteTask(samples={Type: COLLECTION_IN, Depth: 4},returns=1)
def updatePowerSumsOrder1Dimension0_Task(samples,*args):
    return updatePowerSumsOrder1Dimension0(samples,*args)
def updatePowerSumsOrder2Dimension0(old_sample_counter, samples, power_sum_1, power_sum_2):
    """Accumulate power sums of orders 1 and 2 (dimension 0) over new samples.

    :param old_sample_counter: number of samples already accumulated
    :param samples: list of entries (a bare tuple is treated as one entry);
        entry[0] is a list where entry[0][k][0] holds the order-(k+1)
        contribution and entry[0][-1] the number of contributing samples
    :param power_sum_1: current order-1 sum, or None on the first call
    :param power_sum_2: current order-2 sum, or None on the first call
    :return: (sample_counter, power_sum_1, power_sum_2) updated values
    """
    sample_counter = old_sample_counter
    if type(samples) is tuple:
        samples = [samples]
    for entry in samples:
        sample = entry[0]
        # fix: compare with `is None` (was `== None`)
        if power_sum_1 is None:
            power_sum_1 = sample[0][0]
            power_sum_2 = sample[1][0]
        else:
            power_sum_1 = power_sum_1 + sample[0][0]
            power_sum_2 = power_sum_2 + sample[1][0]
        # counter update is branch-independent: hoisted out of the if/else
        sample_counter = sample_counter + sample[-1]
    return sample_counter, power_sum_1, power_sum_2
# Task wrapper running updatePowerSumsOrder2Dimension0 through the ExaQute
# scheduler; returns (counter, S1, S2).
@ExaquteTask(samples={Type: COLLECTION_IN, Depth: 4},returns=3)
def updatePowerSumsOrder2Dimension0_Task(counter,samples,*args):
    return updatePowerSumsOrder2Dimension0(counter,samples,*args)
def updatePowerSumsOrder10Dimension0(old_sample_counter, samples, power_sum_1, power_sum_2, power_sum_3, power_sum_4, power_sum_5, power_sum_6, power_sum_7, power_sum_8, power_sum_9, power_sum_10):
    """Accumulate power sums of orders 1..10 (dimension 0) over new samples.

    :param old_sample_counter: number of samples already accumulated
    :param samples: list of entries (a bare tuple is treated as one entry);
        entry[0] is a list where entry[0][k][0] holds the order-(k+1)
        contribution and entry[0][-1] the number of contributing samples
    :param power_sum_1..power_sum_10: current sums (all None on first call)
    :return: (sample_counter, S1, ..., S10) updated values
    """
    sample_counter = old_sample_counter
    if type(samples) is tuple:
        samples = [samples]
    # Collect the ten running sums into a list so the per-order update is a
    # single loop instead of ten copy-pasted statements (was `== None` too).
    sums = [power_sum_1, power_sum_2, power_sum_3, power_sum_4, power_sum_5,
            power_sum_6, power_sum_7, power_sum_8, power_sum_9, power_sum_10]
    for entry in samples:
        sample = entry[0]
        if sums[0] is None:
            # first contribution: initialize every order
            sums = [sample[order][0] for order in range(10)]
        else:
            sums = [sums[order] + sample[order][0] for order in range(10)]
        sample_counter = sample_counter + sample[-1]
    return (sample_counter, sums[0], sums[1], sums[2], sums[3], sums[4],
            sums[5], sums[6], sums[7], sums[8], sums[9])
# Task wrapper running updatePowerSumsOrder10Dimension0 through the ExaQute
# scheduler; returns (counter, S1, ..., S10).
@ExaquteTask(samples={Type: COLLECTION_IN, Depth: 4},returns=11)
def updatePowerSumsOrder10Dimension0_Task(counter,samples,*args):
    return updatePowerSumsOrder10Dimension0(counter,samples,*args)
def updatePowerSumsOrder2Dimension1(old_sample_counter, samples, power_sum_upper_1, power_sum_lower_1, power_sum_upper_2, power_sum_lower_2):
    """Accumulate order-1/2 power sums for upper and lower levels (dimension 1).

    :param old_sample_counter: number of samples already accumulated
    :param samples: list of entries (a bare tuple is treated as one entry);
        entry[0] is the upper-level contribution, entry[1] the lower-level
        one (anything that is not a list means index 0 — no lower level,
        zeros are used instead)
    :param power_sum_*: current sums (all None on first call)
    :return: (counter, upper_1, lower_1, upper_2, lower_2) updated values
    """
    sample_counter = old_sample_counter
    if type(samples) is tuple:
        samples = [samples]
    for entry in samples:
        sample_upper = entry[0]
        if type(entry[1]) is list:  # index > 0: lower-level contribution given
            sample_lower = entry[1]
        else:  # index == 0: no coarser level, contribute zeros
            sample_lower = [[0.0], [0.0]]
        # fix: compare with `is None` (was `== None`)
        if power_sum_upper_1 is None:
            power_sum_upper_1 = sample_upper[0][0]
            power_sum_upper_2 = sample_upper[1][0]
            power_sum_lower_1 = sample_lower[0][0]
            power_sum_lower_2 = sample_lower[1][0]
        else:
            power_sum_upper_1 = power_sum_upper_1 + sample_upper[0][0]
            power_sum_upper_2 = power_sum_upper_2 + sample_upper[1][0]
            power_sum_lower_1 = power_sum_lower_1 + sample_lower[0][0]
            power_sum_lower_2 = power_sum_lower_2 + sample_lower[1][0]
        # counter update is branch-independent: hoisted out of the if/else
        sample_counter = sample_counter + sample_upper[-1]
    return sample_counter, power_sum_upper_1, power_sum_lower_1, power_sum_upper_2, power_sum_lower_2
# Task wrapper running updatePowerSumsOrder2Dimension1 through the ExaQute
# scheduler; returns (counter, upper_1, lower_1, upper_2, lower_2).
@ExaquteTask(samples={Type: COLLECTION_IN, Depth: 4},returns=5)
def updatePowerSumsOrder2Dimension1_Task(counter,samples,*args):
    return updatePowerSumsOrder2Dimension1(counter,samples,*args)
| 46.85567 | 186 | 0.678768 | 0 | 0 | 0 | 0 | 763 | 0.167877 | 0 | 0 | 219 | 0.048185 |
2bf9b04946af07ecfeb4321f3b7f8c1cc092942c | 9,264 | py | Python | tests-py/chat/client/client.py | moky/WormHole | 6b2b79274274f6764e0d519d384eb65489f4ca56 | [
"MIT"
] | 5 | 2020-05-24T03:35:00.000Z | 2021-06-05T00:27:54.000Z | tests-py/chat/client/client.py | moky/WormHole | 6b2b79274274f6764e0d519d384eb65489f4ca56 | [
"MIT"
] | null | null | null | tests-py/chat/client/client.py | moky/WormHole | 6b2b79274274f6764e0d519d384eb65489f4ca56 | [
"MIT"
] | 2 | 2020-09-11T05:29:11.000Z | 2022-03-13T15:45:22.000Z | # -*- coding: utf-8 -*-
import json
import threading
import time
from abc import abstractmethod
from typing import Optional
from dmtp.mtp import tlv
from dmtp import mtp
import dmtp
import stun
from .manager import ContactManager, FieldValueEncoder, Session
def time_string(timestamp: int) -> str:
    """Format a Unix timestamp as 'yy-mm-dd HH:MM:SS' in local time."""
    return time.strftime('%y-%m-%d %H:%M:%S', time.localtime(timestamp))
"""
DMTP Client
~~~~~~~~~~~
"""
class DMTPClientHandler:
    """Callback interface notified about incoming DMTP commands/messages.

    NOTE(review): does not inherit abc.ABC, so @abstractmethod is not
    actually enforced — confirm whether enforcement is wanted.
    """

    @abstractmethod
    def process_command(self, cmd: dmtp.Command, source: tuple) -> bool:
        """Handle a command received from *source*."""
        pass

    @abstractmethod
    def process_message(self, msg: dmtp.Message, source: tuple):
        """Handle a message received from *source*."""
        pass
class DMTPClient(dmtp.Client):
    def __init__(self, port: int, host: str='127.0.0.1'):
        """
        :param port: local UDP port to bind
        :param host: local interface address (default loopback)
        """
        super().__init__(local_address=(host, port))
        self.__server_address = None  # set on login / via property
        self.nat = 'Unknown'  # NAT type, presumably filled by STUN detection
        # database for location of contacts
        db = self._create_contact_manager()
        db.identifier = 'moky-%d' % port
        self.__database = db
        self.delegate = db
        # delegate for show message
        self.handler: Optional[DMTPClientHandler] = None
        # punching threads, keyed by destination address
        self.__punching = {}
def _create_contact_manager(self) -> ContactManager:
db = ContactManager(peer=self.peer)
db.identifier = 'anyone@anywhere'
return db
    @property
    def server_address(self) -> Optional[tuple]:
        """(host, port) of the DMTP server, or None before it is set."""
        return self.__server_address

    @server_address.setter
    def server_address(self, value: tuple):
        self.__server_address = value

    @property
    def identifier(self) -> str:
        """User identifier, stored in the contact database."""
        return self.__database.identifier

    @identifier.setter
    def identifier(self, value: str):
        self.__database.identifier = value
    def connect(self, remote_address: tuple): # -> Optional[dmtp.Connection]:
        """Connect to a remote peer and start hole punching on success."""
        print('connecting to %s' % str(remote_address))
        conn = self.peer.connect(remote_address=remote_address)
        if conn is not None:
            local_address = self.peer.local_address
            # keep sending punch packets so the NAT mapping stays open
            self.__keep_punching(destination=remote_address, source=local_address)
        return conn
#
# Client actions
#
    def say_hello(self, destination: tuple) -> bool:
        """Send a HELLO command carrying our identifier to *destination*.

        Falls back to an explicit HelloCommand only when the base-class
        implementation reports it did not send one.
        """
        if super().say_hello(destination=destination):
            return True
        cmd = dmtp.HelloCommand.new(identifier=self.identifier)
        print('send cmd: %s' % cmd)
        self.send_command(cmd=cmd, destination=destination)
        return True
def call(self, identifier: str) -> bool:
cmd = dmtp.CallCommand.new(identifier=identifier)
print('send cmd: %s' % cmd)
self.send_command(cmd=cmd, destination=self.__server_address)
return True
    def login(self, identifier: str, server_address: tuple=None):
        """Connect to the server and announce ourselves as *identifier*.

        :param server_address: overrides (and stores) the server address;
            when None the previously stored address is used
        """
        if server_address is None:
            server_address = self.server_address
        else:
            self.__server_address = server_address
        self.peer.connect(remote_address=server_address)
        assert server_address is not None, 'server address not set'
        self.__database.identifier = identifier
        self.say_hello(destination=server_address)
def ping(self, remote_address: tuple, local_address: tuple=None):
res = self.peer.hub.send(data=b'PING', destination=remote_address, source=local_address)
return res == 4
    def __keep_punching(self, destination: tuple, source: tuple):
        """Start a punch thread for *destination* unless one is running."""
        t = self.__punching.get(destination)
        if t is None:
            print('start punching for %s ...' % str(destination))
            t = PunchThread(dmtp_client=self, remote_address=destination, local_address=source)
            self.__punching[destination] = t
            t.start()

    def __stop_punching(self, destination: tuple):
        """Stop and forget the punch thread for *destination*, if any."""
        t = self.__punching.get(destination)
        if t is not None:
            assert isinstance(t, PunchThread), 'punching thread error: %s' % t
            print('stop punching for %s' % str(destination))
            t.stop()
            self.__punching.pop(destination)
    def process_command(self, cmd: dmtp.Command, source: tuple) -> bool:
        """Log an incoming command, notify the handler, then delegate to base."""
        print('received cmd from %s:\n\t%s' % (source, cmd))
        if self.handler is not None:
            self.handler.process_command(cmd=cmd, source=source)
        return super().process_command(cmd=cmd, source=source)
    def process_message(self, msg: dmtp.Message, source: tuple) -> bool:
        """Log an incoming message and forward it to the optional handler.

        The parent implementation is deliberately bypassed (see the
        commented-out call); messages are always reported as handled.
        """
        print('received msg from %s:\n\t%s' % (source, msg))
        if self.handler is not None:
            self.handler.process_message(msg=msg, source=source)
        # return super().process_message(msg=msg, source=source)
        return True
    def send_command(self, cmd: dmtp.Command, destination: tuple) -> mtp.Departure:
        """Log the outgoing command, then let the parent class send it."""
        print('sending cmd to %s:\n\t%s' % (destination, cmd))
        return super().send_command(cmd=cmd, destination=destination)
    def send_message(self, msg: dmtp.Message, destination: tuple) -> mtp.Departure:
        """Log the outgoing message (as JSON), then let the parent send it."""
        print('sending msg to %s:\n\t%s' % (destination, json.dumps(msg, cls=FieldValueEncoder)))
        return super().send_message(msg=msg, destination=destination)
def get_sessions(self, identifier: str) -> list:
"""
Get connected locations for user ID
:param identifier: user ID
:return: connected locations and addresses
"""
sessions = []
assert self.delegate is not None, 'location delegate not set'
locations = self.delegate.get_locations(identifier=identifier)
now = int(time.time())
for loc in locations:
assert isinstance(loc, dmtp.LocationValue), 'location error: %s' % loc
source_address = loc.source_address
if source_address is not None:
conn = self.peer.get_connection(remote_address=source_address)
if conn is not None and conn.is_connected(now=now):
sessions.append(Session(location=loc, address=source_address))
continue
mapped_address = loc.mapped_address
if mapped_address is not None:
conn = self.peer.get_connection(remote_address=mapped_address)
if conn is not None and conn.is_connected(now=now):
sessions.append(Session(location=loc, address=mapped_address))
continue
return sessions
#
# Send
#
def send_text(self, receiver: str, msg: str) -> Optional[dmtp.Message]:
sessions = self.get_sessions(identifier=receiver)
if len(sessions) == 0:
print('user (%s) not login ...' % receiver)
# ask the server to help building a connection
self.call(identifier=receiver)
return None
content = msg.encode('utf-8')
msg = dmtp.Message.new(info={
'sender': self.identifier,
'receiver': receiver,
'time': int(time.time()),
'data': content,
})
for item in sessions:
assert isinstance(item, Session), 'session error: %s' % item
print('send msg to %s:\n\t%s' % (item.address, msg))
self.send_message(msg=msg, destination=item.address)
return msg
#
# PeerHandler
#
    def received_command(self, cmd: tlv.Data, source: tuple, destination: tuple) -> bool:
        """PeerHandler callback: any packet from *source* proves the hole is
        open, so stop punching toward it before normal processing."""
        self.__stop_punching(destination=source)
        return super().received_command(cmd=cmd, source=source, destination=destination)
    def received_message(self, msg: tlv.Data, source: tuple, destination: tuple) -> bool:
        """PeerHandler callback: stop punching toward *source* (it is now
        reachable) before normal message processing."""
        self.__stop_punching(destination=source)
        return super().received_message(msg=msg, source=source, destination=destination)
class PunchThread(threading.Thread):
    """Background worker that "punches" a UDP hole by sending PING to the
    remote address every half second for at most ~60 seconds, then greets
    the peer with HELLO.  Stopped early via stop() once traffic arrives."""
    def __init__(self, dmtp_client: DMTPClient, remote_address: tuple, local_address: tuple=None):
        super().__init__()
        self.running = True  # cleared by stop() to end the run() loop early
        self.__dmtp_client = dmtp_client
        self.__remote_address = remote_address
        self.__local_address = local_address
    def stop(self):
        # cooperative shutdown: run() checks this flag each iteration
        self.running = False
    def run(self):
        client = self.__dmtp_client
        remote = self.__remote_address
        local = self.__local_address
        now = int(time.time())
        timeout = now + 60  # give up punching after roughly one minute
        while self.running and now < timeout:
            when = time_string(now)
            print('[%s] sending "PING" to %s' % (when, remote))
            client.ping(remote_address=remote, local_address=local)
            time.sleep(0.5)
            now = int(time.time())
        # say HI after ping
        client.say_hello(destination=remote)
"""
STUN Client
~~~~~~~~~~~
"""
class STUNClientHandler:
    """Callback interface for receiving textual feedback from STUNClient.

    NOTE(review): uses @abstractmethod without an ABC metaclass, so
    instantiation is not actually blocked -- subclasses are simply
    expected to override feedback().
    """
    @abstractmethod
    def feedback(self, msg: str):
        """Receive one formatted log/progress line produced by the client."""
        pass
class STUNClient(stun.Client):
    """STUN client that mirrors its log lines to an optional handler."""
    def __init__(self, host: str, port: int):
        super().__init__(host=host, port=port)
        self.server_address = None
        # optional feedback sink; when None, messages are only printed
        self.handler: STUNClientHandler = None
        # self.retries = 5
    def info(self, msg: str):
        """Prefix *msg* with a timestamp, print it, and forward it to the
        handler (if one is attached)."""
        when = time_string(int(time.time()))
        message = '[%s] %s' % (when, msg)
        print(message)
        if self.handler is not None:
            self.handler.feedback(msg=message)
| 34.184502 | 98 | 0.629642 | 8,761 | 0.945704 | 0 | 0 | 638 | 0.068869 | 0 | 0 | 1,059 | 0.114313 |
2bfaa9a987827890c7911e5206dd2cbfe6e55ff5 | 3,567 | py | Python | srpc/payload.py | fergul/py-SRPC | 14d75934ee8930c002db9d579533a1544df35f8a | [
"0BSD"
] | null | null | null | srpc/payload.py | fergul/py-SRPC | 14d75934ee8930c002db9d579533a1544df35f8a | [
"0BSD"
] | null | null | null | srpc/payload.py | fergul/py-SRPC | 14d75934ee8930c002db9d579533a1544df35f8a | [
"0BSD"
] | null | null | null | from struct import pack, unpack
from srpcDefs import Command
class Payload(object):
    """Basic 12-byte payload header.

    Layout: subport (uint32, host byte order) followed by seqNo (uint32),
    command (uint16), fragment number and fragment count (uint8 each), all
    in network byte order.  When ``buffer`` is given, the header fields are
    parsed from it instead of the keyword arguments.
    """
    def __init__(self, subport = 0, seqNo = 0, command = 0, fnum = 0, nfrags = 0,
            buffer = None):
        super(Payload, self).__init__()
        # start from the explicit arguments ...
        self.buffer = buffer
        self.subport = subport
        self.seqNo = seqNo
        self.command = command
        self.fragment = fnum
        self.fragmentCount = nfrags
        # ... but when raw bytes are supplied, parse the header from them
        if buffer is not None:
            # subport is host byte order; the rest is network byte order
            self.subport = unpack("@I", buffer[0:4])[0]
            fields = unpack(">IHBB", buffer[4:12])
            self.seqNo, self.command, self.fragment, self.fragmentCount = fields
    def pack(self):
        """Pack subport in host order followed by the rest in network order."""
        head = pack("@I", self.subport)
        tail = pack(">IHBB", self.seqNo, self.command, self.fragment, self.fragmentCount)
        return head + tail
    def __str__(self):
        return "Payload - Subport: {}\n\tSeqNo: {}\
    \n\tCommand: {}\n\tFragment: {}\n\tFragCount: {}".format(self.subport,
            self.seqNo, self.command, self.fragment, self.fragmentCount)
class ControlPayload(Payload):
    """Payload for issuing commands to RPC host"""
    # Pure marker subclass: adds no fields beyond the base header; exists
    # so control traffic can be distinguished by type.
    def __init__(self, subport = 0, seqNo = 0, command = 0, fnum = 0,
            nfrags = 0, buffer = None):
        super(ControlPayload, self).__init__(subport, seqNo, command, fnum,
            nfrags, buffer)
class ConnectPayload(Payload):
    """Payload for sending CONNECT command to a named service"""
    def __init__(self, subport = 0, seqNo = 0, fnum = 0, nfrags = 0,
            serviceName = None, buffer = None):
        super(ConnectPayload, self).__init__(subport, seqNo, Command.CONNECT,
            fnum, nfrags, buffer)
        if buffer is None:
            self.serviceName = serviceName
        else:
            # Calculate string size (buffer - payload size - null end byte)
            ser_len = len(buffer) - 13
            fmt = ">{}sx".format(ser_len)
            self.serviceName = unpack(fmt, buffer[12:])[0]
    def pack(self): #Pack payload and append serviceName
        # NOTE(review): parsing above expects a trailing NUL byte ('x' in
        # the unpack format) but pack() does not append one -- presumably
        # serviceName already carries it; confirm against the peer protocol.
        return super(ConnectPayload, self).pack() + self.serviceName
    def __str__(self):
        return "ConnectPayload:\n\tService: {}\n\t{}".format(
            self.serviceName, super(ConnectPayload, self))
class DataPayload(Payload):
    """Payload for sending some data/fragments.

    Extends the base header with two network-order uint16 lengths (total
    data length, this fragment's length) followed by the fragment bytes.
    """
    def __init__(self, subport=0, seqNo=0, command=0, fnum=0, nfrags=0,
            service=None, data_len=0, frag_len=0, data="", buffer=None):
        super(DataPayload, self).__init__(subport, seqNo, command, fnum, nfrags,
            buffer)
        # NOTE(review): the frag_len (and service) arguments are ignored --
        # the fragment length is always derived from the actual data; both
        # are kept for signature compatibility.
        self.data_len = data_len
        self.frag_len = len(data)
        self.data = data
        if buffer is not None:  # unpack a packet 'received'
            # unpack lengths from network order to host order
            self.data_len, self.frag_len = unpack(">HH", buffer[12:16])
            self.data = buffer[16:]
    def pack(self):
        """Prepend packed payload to packed network ordered lengths and data"""
        return (super(DataPayload, self).pack() +
            pack(">HH", self.data_len, self.frag_len) + self.data)
    def __str__(self):
        # Fixed: the original format string read "\n\Data_len" -- a stray
        # backslash where "\n\tData_len" was clearly intended.
        return "DataPayload:\n\tData_len: {}\t Frag_len:{}\n\t{}".format(
            self.data_len, self.frag_len, super(DataPayload, self))
| 44.037037 | 84 | 0.584244 | 3,498 | 0.980656 | 0 | 0 | 0 | 0 | 0 | 0 | 775 | 0.217269 |
2bfae8e287502bfa4fa79da3660a8aed6860e04e | 598 | py | Python | lista02Exec01.py | marcelocmedeiros/PrimeiraAvalaizcaoPython | 6390505530fc4f9acef2e3b93944547a3685d611 | [
"MIT"
] | null | null | null | lista02Exec01.py | marcelocmedeiros/PrimeiraAvalaizcaoPython | 6390505530fc4f9acef2e3b93944547a3685d611 | [
"MIT"
] | null | null | null | lista02Exec01.py | marcelocmedeiros/PrimeiraAvalaizcaoPython | 6390505530fc4f9acef2e3b93944547a3685d611 | [
"MIT"
] | null | null | null | #MARCELO CAMPOS DE MEDEIROS
# MARCELO CAMPOS DE MEDEIROS
# ADS UNIFIP P1 2020
# LISTA 02
#
# Exercise 1: read the fuel price per litre and the amount of money the
# user wants to spend, then report how many litres that amount buys.
price_per_litre = float(input('Qual o valor do combustível?R$ '))
amount_paid = float(input('Qual o valor que deseja abastecer?R$ '))
litres_bought = amount_paid / price_per_litre
print('-=' * 35)
print(f'O valor abastecido foi R${amount_paid:.2f} e a quantidade de combustivél é {litres_bought:.2f}l.')
print('-=' * 35)
print('          OBRIGADO, VOLTE SEMPRE!        ')
2bfafe785a0c9b524d6065b296778e12ff5c00b9 | 4,982 | py | Python | py/lvmspec/pipeline/state.py | sdss/lvmspec | befd6991537c4947fdf63ca262937f2bb845148f | [
"BSD-3-Clause"
] | null | null | null | py/lvmspec/pipeline/state.py | sdss/lvmspec | befd6991537c4947fdf63ca262937f2bb845148f | [
"BSD-3-Clause"
] | null | null | null | py/lvmspec/pipeline/state.py | sdss/lvmspec | befd6991537c4947fdf63ca262937f2bb845148f | [
"BSD-3-Clause"
] | null | null | null | #
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
"""
lvmspec.pipeline.state
===========================
Functions for manipulating the state of objects in the dependency graph.
"""
from __future__ import absolute_import, division, print_function
import os
import glob
import subprocess as sp
import numpy as np
from lvmutil.log import get_logger
from .. import util
from .. import io
from .common import *
from .graph import *
def graph_db_info():
    """
    Return information about the runtime database.

    Currently this returns info about the temporary yaml files
    that contain a dump of the state.  In the future this will
    return connection information about the database.

    Args: None

    Returns:
        tuple with elements

        - file: name of the most recent state file
        - stime: modification time of the file
        - jobid: the slurm job name or POSIX PID
        - running: True if the job is still running
    """
    proddir = os.path.abspath(io.specprod_root())
    rundir = os.path.join(proddir, "run")
    file = ""
    stime = 0
    jobid = "-1"
    running = False
    statepat = re.compile(r'.*state_(.*).yaml')
    slrmpat = re.compile(r'slurm-(.*)')
    # Find the newest state file
    for stfile in glob.glob(os.path.join(rundir, "state_*.yaml")):
        thistime = os.path.getmtime(stfile)
        if thistime > stime:
            file = stfile
            stime = thistime
            statemat = statepat.match(stfile)
            if statemat is None:
                raise RuntimeError("state file matches glob but not regex- should never get here!")
            jobid = statemat.group(1)
    # See if this job is still running
    slrmmat = slrmpat.match(jobid)
    if slrmmat is None:
        # we were just using bash...
        pid = int(jobid)
        if util.pid_exists(pid):
            running = True
    else:
        slrmid = int(slrmmat.group(1))
        state = sp.check_output("squeue -j {} 2>/dev/null | tail -1 | gawk '{{print $10}}'".format(slrmid), shell=True)
        # Fixed: check_output returns bytes (py3) and the pipeline appends a
        # trailing newline, so the original comparison to 'R' could never
        # succeed.  Decode and strip before comparing.
        if state.decode().strip() == 'R':
            running = True
    return (file, stime, jobid, running)
def graph_db_check(grph):
    """
    Check the state of all objects in a graph.

    This sets the state of all objects in the graph based on external
    information.  This might eventually involve querying a database.
    For now, the filesystem is checked for the existance of the object.
    Currently this marks all nodes as "none" or "done".  The "running" and
    "fail" states are overridden.  This may change in the future.

    Args:
        grph (dict): the dependency graph.

    Returns:
        Nothing.  The graph is modified in place.
    """
    for name, nd in grph.items():
        if nd["type"] == "night":
            # Fixed: this previously read ``if type == "night"``, comparing
            # the *builtin* ``type`` against a string (always False), so
            # night nodes were never short-circuited to "done" here.
            nd["state"] = "done"
            continue
        path = graph_path(name)
        if not os.path.isfile(path):
            # file does not exist
            nd["state"] = "none"
            continue
        if os.path.islink(path):
            # this is a fake symlink- always done
            nd["state"] = "done"
            continue
        tout = os.path.getmtime(path)
        stale = False
        for input in nd["in"]:
            if grph[input]["type"] == "night":
                continue
            inpath = graph_path(input)
            # if the input file exists, check if its timestamp
            # is newer than the output.
            if os.path.isfile(inpath):
                tin = os.path.getmtime(inpath)
                if tin > tout:
                    nd["state"] = "none"
                    stale = True
        if not stale:
            nd["state"] = "done"
    return
def graph_db_read(file):
    """
    Load the graph and all state info.

    Construct the graph from the runtime database.  For now, this
    just reads a yaml dump.

    Args:
        file (str): the path to the file to read.

    Returns:
        dict: The dependency graph.
    """
    # NOTE(review): the parameter name shadows the py2 builtin ``file``;
    # kept unchanged because callers may pass it by keyword.
    return yaml_read(file)
def graph_db_write(grph):
    """
    Synchronize graph data to disk.

    This takes the in-memory graph and the states of all objects
    and writes this information to disk.  For now, this just dumps
    to a yaml file.  In the future, this function will modify a
    database.

    Args:
        grph (dict): the dependency graph.

    Returns:
        Nothing.
    """
    proddir = os.path.abspath(io.specprod_root())
    rundir = os.path.join(proddir, "run")
    # name the dump after the slurm job when inside slurm, else the PID
    if "SLURM_JOBID" in os.environ:
        jobid = "slurm-{}".format(os.environ["SLURM_JOBID"])
    else:
        jobid = os.getpid()
    base = os.path.join(rundir, "state_{}".format(jobid))
    statefile = "{}.yaml".format(base)
    statedot = "{}.dot".format(base)
    yaml_write(statefile, grph)
    with open(statedot, "w") as f:
        graph_dot(grph, f)
    return
| 25.947917 | 119 | 0.59173 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,432 | 0.488157 |
2bfbf87d90271f02d7821b9fea4374b739a90559 | 6,180 | py | Python | rest_api/projects/serializers.py | joatuapp/joatu-django | 5626d03ba89c55650ff5bff2e706ca0883ae3b9c | [
"MIT"
] | 10 | 2018-05-13T18:01:57.000Z | 2018-12-23T17:11:14.000Z | rest_api/projects/serializers.py | joatuapp/joatu-django | 5626d03ba89c55650ff5bff2e706ca0883ae3b9c | [
"MIT"
] | 88 | 2018-05-04T15:33:46.000Z | 2022-03-08T21:09:21.000Z | rest_api/projects/serializers.py | joatuapp/joatu-django | 5626d03ba89c55650ff5bff2e706ca0883ae3b9c | [
"MIT"
] | 7 | 2018-05-08T16:05:06.000Z | 2018-09-13T05:49:05.000Z | from rest_framework import serializers
from projects.models import (
Project,
ProjectVolunteers,
ProjectVolunteersRegistration,
ProjectAttendees,
ProjectAttendeesRegistration,
ProjectDiscussion,
ProjectAnswerDiscussion,
ProjectHub,
)
class ProjectVolunteersRegistrationSerializer(serializers.HyperlinkedModelSerializer):
    """Registration of a profile as a volunteer for a project role."""
    class Meta:
        model = ProjectVolunteersRegistration
        fields = ('url', 'profile', 'project_volunteers', 'project_volunteers_ref')
    def create(self, validated_data):
        # 'project_volunteers_ref' carries the primary key of the target
        # role; resolve it, create the registration, then refresh the
        # denormalized head count on the role.
        project_volunteers = ProjectVolunteers.objects.get(pk=validated_data['project_volunteers_ref'])
        registration = ProjectVolunteersRegistration.objects.create(
            project_volunteers=project_volunteers,
            **validated_data
        )
        count = ProjectVolunteersRegistration.objects.filter(
            project_volunteers=project_volunteers
        ).count()
        project_volunteers.registered = count
        project_volunteers.save()
        return registration
class ProjectVolunteersSerializer(serializers.HyperlinkedModelSerializer):
    """A volunteer role on a project, with its nested registrations."""
    # read-only nested view of who has signed up for this role
    volunteers_registration = ProjectVolunteersRegistrationSerializer(many=True, read_only=True)
    class Meta:
        model = ProjectVolunteers
        fields = (
            'url',
            'id',
            'project',
            'role',
            'description',
            'seats',
            'registered',
            'minimum_registration',
            'volunteers_registration',
        )
        # 'registered' is a counter maintained by the registration serializer
        read_only_fields = ('registered', 'project', 'id')
class ProjectAttendeesRegistrationSerializer(serializers.HyperlinkedModelSerializer):
    """Registration of a profile as an attendee of a project."""
    class Meta:
        model = ProjectAttendeesRegistration
        fields = ('url', 'profile', 'project_attendees', 'project_attendees_ref')
    def create(self, validated_data):
        # 'project_attendees_ref' carries the primary key of the target
        # attendee pool; resolve it, register, then refresh the counter.
        project_attendees = ProjectAttendees.objects.get(pk=validated_data['project_attendees_ref'])
        registration = ProjectAttendeesRegistration.objects.create(project_attendees=project_attendees, **validated_data)
        count = ProjectAttendeesRegistration.objects.filter(project_attendees=project_attendees).count()
        project_attendees.registered = count
        project_attendees.save()
        return registration
class ProjectAttendeesSerializer(serializers.HyperlinkedModelSerializer):
    """The attendee pool of a project, with its nested registrations."""
    # read-only nested view of registered attendees
    attendees_registration = ProjectAttendeesRegistrationSerializer(many=True, read_only=True)
    class Meta:
        model = ProjectAttendees
        fields = (
            'url',
            'id',
            'project',
            'seats',
            'registered',
            'attendees_registration',
            'minimum_registration',
        )
        # 'registered' is maintained by the registration serializer
        read_only_fields = ('registered', 'project',)
class ProjectAnswerDiscussionSerializer(serializers.HyperlinkedModelSerializer):
    """An answer posted inside a project discussion thread."""
    class Meta:
        model = ProjectAnswerDiscussion
        fields = ('url', 'id', 'discussion_ref', 'discussion', 'text', 'profile', 'created', 'updated')
        read_only_fields = ('discussion', 'profile')
    def create(self, validated_data):
        # 'discussion_ref' carries the parent discussion's primary key;
        # resolve it and attach the new answer to that thread.
        project_discussion = ProjectDiscussion.objects.get(pk=validated_data['discussion_ref'])
        answer = ProjectAnswerDiscussion.objects.create(discussion=project_discussion, **validated_data)
        return answer
class ProjectDiscussionSerializer(serializers.HyperlinkedModelSerializer):
    """A discussion thread attached to a project, with nested answers."""
    # read-only nested view of all answers in this thread
    answer_discussion_project = ProjectAnswerDiscussionSerializer(many=True, read_only=True)
    class Meta:
        model = ProjectDiscussion
        fields = (
            'url',
            'id',
            'project',
            'project_ref',
            'title',
            'text',
            'profile',
            'created',
            'updated',
            'answer_discussion_project',
        )
        read_only_fields = ('profile', 'project', 'id')
    def create(self, validated_data):
        # 'project_ref' carries the parent project's primary key
        project = Project.objects.get(pk=validated_data['project_ref'])
        new_discussion = ProjectDiscussion.objects.create(project=project, **validated_data)
        return new_discussion
class ProjectSerializer(serializers.HyperlinkedModelSerializer):
    """Full project representation with nested attendees, volunteer roles
    and discussion threads."""
    attendees = ProjectAttendeesSerializer()
    volunteers = ProjectVolunteersSerializer(many=True)
    discussion_project = ProjectDiscussionSerializer(many=True, read_only=True)
    class Meta:
        model = Project
        fields = ('url', 'id', 'name', 'start',
                  'end', 'description', 'category',
                  'sub_category', 'oth_category', 'oth_sub_cat', 'place_name', 'number', 'street',
                  'postal_code', 'city', 'organizer', 'created',
                  'updated', 'project_type', 'attendees',
                  'volunteers', 'discussion_project')
        read_only_fields = ('organizer', 'id')
    def create(self, validated_data):
        """Create the project plus its nested attendee/volunteer rows.

        'CO' projects take only attendees, 'CP' projects take only
        volunteer roles; any other project type takes both.  (The
        original spelled this as a three-branch if/elif/else that
        duplicated both creation snippets; the two independent checks
        below are equivalent.)
        """
        attendees_data = validated_data.pop('attendees')
        volunteers_data = validated_data.pop('volunteers')
        new_project = Project.objects.create(**validated_data)
        project_type = validated_data['project_type']
        if project_type != 'CP':
            # 'CO' and mixed projects accept attendees
            ProjectAttendees.objects.create(project=new_project, **attendees_data)
        if project_type != 'CO':
            # 'CP' and mixed projects accept volunteer roles
            for volunteer_data in volunteers_data:
                ProjectVolunteers.objects.create(project=new_project, **volunteer_data)
        return new_project
class ProjectShortSerializer(serializers.HyperlinkedModelSerializer):
    """Compact project listing (name, start and timestamps only)."""
    class Meta:
        model = Project
        fields = ('url', 'id', 'name', 'start', 'created', 'updated',)
class ProjectHubSerializer(serializers.HyperlinkedModelSerializer):
    """A project as seen from a hub, with distance and coordinates."""
    # full nested project payload
    project = ProjectSerializer()
    class Meta:
        model = ProjectHub
        fields = ('project', 'distance_km', 'lat', 'lng')
| 36.352941 | 121 | 0.66521 | 5,886 | 0.952427 | 0 | 0 | 0 | 0 | 0 | 0 | 1,137 | 0.183981 |
2bfc0a8be370fd6ea1e8a783e88ab1afd8416818 | 1,165 | py | Python | scripts/de-duplication.py | Jonescy/NewsCrawler | 4c2ee03785a2c4d622aeff27fc4efcadfda4546d | [
"MIT"
] | 2 | 2021-01-10T19:28:42.000Z | 2021-03-28T17:42:48.000Z | scripts/de-duplication.py | Jonescy/NewsCrawler | 4c2ee03785a2c4d622aeff27fc4efcadfda4546d | [
"MIT"
] | 6 | 2022-01-13T06:16:55.000Z | 2022-03-14T06:19:58.000Z | scripts/de-duplication.py | Jonescy/focusnews | 4c2ee03785a2c4d622aeff27fc4efcadfda4546d | [
"MIT"
] | null | null | null | """
@Author: Jonescyna@gmail.com
@Created: 2021/3/10
@Application: 作用在mongodb去重
"""
import pymongo
from NewsCrawler.settings import MONGO_URL
# Module-level Mongo client shared by all helpers below (pooled connections).
client = pymongo.MongoClient(MONGO_URL, maxPoolSize=1024)
def find_duplicate(collection, out_col_name='tmp_news'):
    """Aggregate duplicated news documents into a scratch collection.

    Documents are grouped on (title, published, link); every group seen
    more than once is written to ``out_col_name`` together with the list
    of duplicated ``_id`` values (consumed by :func:`del_dup`).

    :param collection: source pymongo collection.
    :param out_col_name: name of the scratch output collection.  Fixed:
        this used to be read from a module-global ``tmp_colName`` that is
        only defined under the ``__main__`` guard, so importing and calling
        this function raised NameError; the default matches the script's
        original value.
    """
    collection.aggregate([
        {'$group': {
            # the fields that define "the same article"
            '_id': {'title': "$title", 'published': "$published", "link": "$link"},
            'uniqueIds': {'$addToSet': "$_id"},  # ids of the duplicate docs
            'count': {'$sum': 1}                 # how many copies exist
        }},
        {'$match': {
            'count': {'$gt': 1}  # keep only groups that actually repeat
        }},
        {'$out': out_col_name}   # materialize into the scratch collection
    ], allowDiskUse=True)
def del_dup(tmp_collection, source_collection):
    """Delete the duplicates recorded by :func:`find_duplicate`.

    For each duplicate group the first ``_id`` is kept and the remaining
    documents are removed from ``source_collection``; the scratch
    collection is dropped afterwards.

    Fixed: the original dropped the module-global ``tmp_col`` instead of
    the ``tmp_collection`` parameter, so the function only worked when run
    from this script's ``__main__`` block.
    """
    for group in tmp_collection.find():
        # keep the first id, delete the remaining copies
        for dup_id in group['uniqueIds'][1:]:
            source_collection.delete_one({"_id": dup_id})
    tmp_collection.drop()  # remove the scratch collection
if __name__ == '__main__':
    tmp_colName = "tmp_news"  # name of the scratch collection
    # the per-source news collections to de-duplicate
    col_list = ['caijing', 'ce', 'eastmoney', 'hexun', 'news', 'newsqq', 'sina', 'wangyi']
    for i in col_list:
        col = client['news'][i]
        tmp_col = client['news'][tmp_colName]
        find_duplicate(col)
        del_dup(tmp_col, col)
| 27.093023 | 91 | 0.573391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 476 | 0.378078 |
2bfd6ec0c174476e4cb09ec60830cc0bdf4b7014 | 861 | py | Python | app/alembic/versions/a1c5591554f0_create_score_table.py | johndatserakis/find-the-state-api | 81da6c37eaf635ddfc01cb9964d0d173248721c7 | [
"MIT"
] | 1 | 2021-12-23T15:40:53.000Z | 2021-12-23T15:40:53.000Z | app/alembic/versions/a1c5591554f0_create_score_table.py | johndatserakis/find-the-state-api | 81da6c37eaf635ddfc01cb9964d0d173248721c7 | [
"MIT"
] | null | null | null | app/alembic/versions/a1c5591554f0_create_score_table.py | johndatserakis/find-the-state-api | 81da6c37eaf635ddfc01cb9964d0d173248721c7 | [
"MIT"
] | null | null | null | """Create score table
Revision ID: a1c5591554f0
Revises: a6517320e072
Create Date: 2021-04-23 23:09:22.801565
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID
# revision identifiers, used by Alembic.
revision = "a1c5591554f0"       # this migration
down_revision = "a6517320e072"  # parent migration in the chain
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``scores`` table."""
    op.create_table(
        "scores",
        sa.Column(
            "id",
            UUID(as_uuid=True),
            primary_key=True,
            # server-side UUID default; assumes the uuid-ossp extension was
            # enabled by an earlier migration -- TODO confirm
            server_default=sa.text("uuid_generate_v4()"),
        ),
        sa.Column("score", sa.String, index=True, nullable=False),
        sa.Column("updated_date", sa.DateTime),
        sa.Column(
            "created_date", sa.DateTime, server_default=sa.text("now()"), nullable=False
        ),
    )
def downgrade():
    """Drop the ``scores`` table, reversing :func:`upgrade`."""
    op.drop_table("scores")
| 22.076923 | 88 | 0.639954 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 265 | 0.307782 |
2bff3d13e03ba432923ed45f1e62d385b93564e4 | 4,303 | py | Python | scripts/parse_tools/parse_object.py | SamuelTrahanNOAA/ccpp-framework | 16271557a692b2c6871bf4e2209b8035a9addc52 | [
"Apache-2.0"
] | null | null | null | scripts/parse_tools/parse_object.py | SamuelTrahanNOAA/ccpp-framework | 16271557a692b2c6871bf4e2209b8035a9addc52 | [
"Apache-2.0"
] | null | null | null | scripts/parse_tools/parse_object.py | SamuelTrahanNOAA/ccpp-framework | 16271557a692b2c6871bf4e2209b8035a9addc52 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""A module for the base, ParseObject class"""
# Python library imports
import re
# CCPP framework imports
from .parse_source import ParseContext, CCPPError
########################################################################
class ParseObject(ParseContext):
    """ParseObject is a simple class that keeps track of an object's
    place in a file and safely produces lines from an array of lines
    >>> ParseObject('foobar.F90', 1) #doctest: +ELLIPSIS
    <__main__.ParseObject object at 0x...>
    >>> ParseObject('foobar.F90', 1).filename
    'foobar.F90'
    >>> ParseObject('foobar.F90', ["##hi mom",], line_start=1).curr_line()
    (None, 1)
    >>> ParseObject('foobar.F90', ["first line","## hi mom"], line_start=1).curr_line()
    ('## hi mom', 1)
    >>> ParseObject('foobar.F90', ["##hi mom",], line_start=1).next_line()
    (None, 1)
    >>> ParseObject('foobar.F90', ["##first line","## hi mom"], line_start=1).next_line()
    ('## hi mom', 1)
    >>> ParseObject('foobar.F90', ["## hi \\\\","mom"], line_start=0).next_line()
    ('## hi mom', 0)
    >>> ParseObject('foobar.F90', ["line1","##line2","## hi mom"], line_start=2).next_line()
    ('## hi mom', 2)
    >>> ParseObject('foobar.F90', ["## hi \\\\","there \\\\","mom"], line_start=0).next_line()
    ('## hi there mom', 0)
    >>> ParseObject('foobar.F90', ["!! line1","!! hi mom"], line_start=1).next_line()
    ('!! hi mom', 1)
    """

    def __init__(self, filename, lines_in, line_start=0):
        """Store the source lines and position the cursor at <line_start>."""
        self._filename = filename
        self._lines = lines_in
        self._line_start = line_start
        self._line_end = line_start
        self._line_next = line_start
        super(ParseObject, self).__init__(linenum=line_start, filename=filename)

    @property
    def first_line_num(self):
        'Return the first line parsed'
        # Fixed: this previously returned self._first_line, an attribute
        # that is never assigned in this class, so accessing the property
        # raised AttributeError.  The starting line lives in _line_start.
        return self._line_start

    @property
    def last_line_num(self):
        'Return the last line parsed'
        return self._line_end

    def curr_line(self):
        """Return (line, line_number) at the current position.

        Continuation lines (ending with a single backslash) are joined
        into one logical line.  Past the end of input, (None, line_num)
        is returned.  The cursor itself is not moved.
        """
        valid_line = self.line_num < len(self._lines)
        _curr_line = None
        _my_curr_lineno = self.line_num
        if valid_line:
            try:
                _curr_line = self._lines[self.line_num].rstrip()
                self._line_next = self.line_num + 1
                self._line_end = self._line_next
            except CCPPError as exc:
                # NOTE(review): plain list indexing raises IndexError, not
                # CCPPError; the bounds check above makes this handler
                # effectively unreachable -- confirm before removing.
                valid_line = False
            # End if
        # We allow continuation self._lines (ending with a single backslash)
        if valid_line and _curr_line.endswith('\\'):
            next_line, lnum = self.next_line()
            if next_line is None:
                # We ran out of lines, just strip the backslash
                _curr_line = _curr_line[0:len(_curr_line)-1]
            else:
                _curr_line = _curr_line[0:len(_curr_line)-1] + next_line
            # End if
        # End if
        # curr_line should not change the line number
        self.line_num = _my_curr_lineno
        return _curr_line, self.line_num

    def next_line(self):
        """Advance the cursor and return the next (logical) line."""
        self.line_num = self._line_next
        return self.curr_line()

    def peek_line(self, line_num):
        """Return line <line_num> without moving the cursor, or None when
        the index is out of range."""
        if (line_num >= 0) and (line_num < len(self._lines)):
            return self._lines[line_num]
        else:
            return None
        # End if

    def reset_pos(self, line_start=0):
        """Move the cursor back to <line_start>; raises CCPPError when the
        target line does not exist."""
        if (line_start < 0) or (line_start >= len(self._lines)):
            raise CCPPError('Attempt to reset_pos to non-existent line, {}'.format(line_start))
        else:
            self.line_num = line_start
            self._line_next = line_start
        # End if

    def write_line(self, line_num, line):
        "Overwrite line, <line_num> with <line>"
        if (line_num < 0) or (line_num >= len(self._lines)):
            raise CCPPError('Attempt to write non-existent line, {}'.format(line_num))
        else:
            self._lines[line_num] = line
        # End if

    def __del__(self):
        try:
            del self._lines
            # NOTE(review): 'regions' is presumably set by ParseContext;
            # it is never assigned in this class -- confirm.
            del self.regions
        except Exception as e:
            pass # Python does not guarantee much about __del__ conditions
        # End try
########################################################################
if __name__ == "__main__":
import doctest
doctest.testmod()
| 35.858333 | 95 | 0.572391 | 3,904 | 0.907274 | 0 | 0 | 216 | 0.050198 | 0 | 0 | 1,881 | 0.437137 |
2bff57af33ceaea8237514bf259d6354d7c86834 | 8,203 | py | Python | spiders/spider.py | gt11799/xueqiu_spider | 72be1a983e3b648b902f12475fb914add7bd45e7 | [
"MIT"
] | 6 | 2016-09-08T07:30:42.000Z | 2021-02-03T18:20:03.000Z | spiders/spider.py | gt11799/xueqiu_spider | 72be1a983e3b648b902f12475fb914add7bd45e7 | [
"MIT"
] | null | null | null | spiders/spider.py | gt11799/xueqiu_spider | 72be1a983e3b648b902f12475fb914add7bd45e7 | [
"MIT"
] | 2 | 2017-02-07T08:30:18.000Z | 2018-04-24T14:01:01.000Z | # -*- coding: utf-8 -*-
import sys
import time
import json
import pickle
import hashlib
import requests
from urlparse import urljoin
from config import *
from spiders.common import *
from spiders.html_parser import *
from logs.log import logger
# Python 2 only: re-expose setdefaultencoding (removed by site.py) and make
# utf-8 the default codec so mixed str/unicode operations do not blow up.
reload(sys)
sys.setdefaultencoding('utf8')
class Spider(object):
def __init__(self, user_name=None, password=None):
self.session = requests.Session()
self.uid = None
self.user_name = user_name
self.password = password
def get_hash(self, string):
m = hashlib.md5()
m.update(string)
return m.hexdigest()
def _request(self, url, params={}):
# 应该使用统一的request函数去请求,此处待重构
try:
response = self.session.get(url, headers=FOLLOWER_HEADER, params=params, timeout=10)
return response
except requests.ConnectionError, requests.ConnectTimeout:
logger.error('%s请求超时')
def visit_index(self):
self.session.get(BASE_URL, headers=BASE_HEADER)
def login(self):
url = urljoin(BASE_URL, LOGIN_URL)
if self.check_login():
logger.info('已经登录')
return
data = {
'areacode': 86,
'remember_me': 'on',
'username': self.user_name,
'password': self.get_hash(self.password),
}
if if_int(self.user_name):
data['telephone'] = data.pop('username')
response = self.session.post(url, headers=BASE_HEADER, data=data)
logger.debug(response.content)
if self.check_login():
logger.info('登录成功')
self.get_people_id('8276760920')
self.save_cookies()
return
raise ValueError('登录失败')
def save_cookies(self):
result = self.load_data()
with open('spiders/.session', 'wb') as f:
cookies = requests.utils.dict_from_cookiejar(self.session.cookies)
data = {
'cookies': cookies,
'uid': self.uid,
'user_name': self.user_name,
}
result[self.user_name] = data
pickle.dump(result, f)
@classmethod
def clear_cookies(cls):
with open('spiders/.session', 'wb') as f:
pickle.dump({}, f)
def load_data(self):
with open('spiders/.session') as f:
try:
return pickle.load(f)
except EOFError:
return {}
def load_cookies(self):
with open('spiders/.session') as f:
try:
data = pickle.load(f)
except EOFError:
return {}
result = data.get(self.user_name)
if not result:
logger.info("账户未登录")
return {}
self.uid = result['uid']
cookies = result['cookies']
return cookies
def check_login(self, load_cookie=True):
if load_cookie:
cookies = self.load_cookies()
response = self.session.get(BASE_URL, headers=BASE_HEADER,
cookies=cookies, allow_redirects=False)
else:
response = self.session.get(BASE_URL, headers=BASE_HEADER,
allow_redirects=False)
if response.status_code == 302:
if self.uid is not None:
return True
location = response.headers['Location']
uid = get_uid_from_url(location)
if uid:
self.uid = uid
return True
else:
logger.error(u"从跳转链接解析uid出错了")
return False
def get_people(self):
url = urljoin(BASE_URL, PEOPLE_URL)
respond = self.session.get(url, headers=BASE_HEADER)
result = get_people(respond.content)
logger.info('抓取了%s个大V' % len(result))
return result
def get_people_id(self, path):
url = urljoin(BASE_URL, path)
respond = self.session.get(url, headers=BASE_HEADER)
if respond.status_code == 200:
uid = get_people_id(respond.content)
return uid
else:
logger.error(u'抓取’%s‘用户的id失败' % path)
def get_followers(self, uid):
size = 1000
url = urljoin(BASE_URL, FOLLOWERS_URL)
params = {
'size': size,
'pageNo': 1,
'uid': uid,
'_': int(time.time() * 1000)
}
respond = self._request(url, params=params)
if not respond:
return []
data = respond.json()
max_page = data.get('maxPage')
if not max_page:
logger.error("获取粉丝失败")
logger.error(data)
raise ValueError("获取粉丝失败")
result = data['followers']
for page in range(1, max_page):
time.sleep(FOLLOWER_PAGE_INTEVAL)
logger.info('开始抓取第%s页的粉丝' % page)
params['pageNo'] = page
params['_'] = int(time.time() * 1000)
respond = self._request(url, params=params)
if not respond:
continue
data = respond.json()
result += data['followers']
return self.handle_followers(result)
def handle_followers(self, data):
return [(_['id'], _['screen_name']) for _ in data]
def get_chat_sequence_id(self, uid):
url = CHAT_HISTORY_URL % uid
params = {
'user_id': self.uid,
'limit': 30,
'_': int(time.time() * 1000)
}
cookies = self.load_cookies()
respond = self.session.get(url, headers=CHAT_HEADER, params=params, cookies=cookies)
if respond.status_code == 200:
data = respond.json()
if len(data) > 1:
return data[-1]['sequenceId']
else:
return 96878141
logger.error('获得聊天id失败')
logger.error(respond.content)
return False
def chat(self, uid, msg):
sequenceId = self.get_chat_sequence_id(uid)
if not sequenceId:
return False
data = {
'plain': msg,
'to_group': False,
'toId': uid,
'sequenceId': sequenceId + 1
}
params = {'user_id': self.uid}
cookies = self.load_cookies()
respond = self.session.post(CHAT_URL, headers=CHAT_HEADER, cookies=cookies,
params=params, data=json.dumps(data))
if respond.status_code == 200:
result = respond.json()
error = result.get('error')
if error:
print '发送消息出错了'
logger.debug(respond.content)
raise ValueError(error.encode('utf8'))
return True
logger.debug(respond.status_code)
logger.debug(respond.content)
return False
def post(self, msg, audience=[]):
p = {"api": "/statuses/update.json", "_": int(time.time() * 1000)}
cookie = self.load_cookies()
url = urljoin(BASE_URL, TOKEN_URL)
r = self.session.get(url, params=p, cookies=cookie,
headers=BASE_HEADER)
try:
token = r.json()['token']
except (IndexError, TypeError, ValueError):
logger.error("MLGB 出错了!")
logger.error("\n%s\n", r.text)
return
audience = ' @'.join(audience)
audience = ' @' + audience.strip()
msg = '%s %s' % (msg, audience)
logger.info('发送的内容是: %s' % msg)
msg = msg.encode().decode()
data = {"status": "<p>%s</p>" % msg, "session_token": token}
url = urljoin(BASE_URL, POST_URL)
r = self.session.post(url, data=data, cookies=cookie,
headers=BASE_HEADER)
if r.status_code == 200:
data = r.json()
if not data.get('error_code') > -1:
logger.debug("完事儿了.")
return
logger.error("MLGB 又出错了!")
logger.error("\n%s\n", r.text)
raise ValueError('发广播出错了')
def if_int(item):
    """Return True if *item* can be converted to an int, else False.

    Fix: values that cannot be converted at all (e.g. None or a list)
    raise TypeError, not ValueError, which previously escaped to the
    caller; they are now reported as non-integers too.
    """
    try:
        int(item)
    except (TypeError, ValueError):
        return False
    return True
| 32.042969 | 96 | 0.534561 | 8,043 | 0.952623 | 0 | 0 | 121 | 0.014331 | 0 | 0 | 962 | 0.113941 |
920062b302b55db0f61f0c70e39b62272b298aab | 1,573 | py | Python | imago/cli.py | opencivicdata/imago | b0cc9379eec02358e25e075b006e6682f9c5fb10 | [
"BSD-3-Clause"
] | 8 | 2015-10-23T14:44:36.000Z | 2018-11-12T00:51:05.000Z | imago/cli.py | opencivicdata/imago | b0cc9379eec02358e25e075b006e6682f9c5fb10 | [
"BSD-3-Clause"
] | 45 | 2015-01-22T19:47:35.000Z | 2017-02-22T17:57:40.000Z | imago/cli.py | datamade/imago | f6a4b52b87d33e6d7a6eaf3c0d1f2dc6b78ce910 | [
"BSD-3-Clause"
] | 10 | 2015-02-25T19:35:05.000Z | 2019-02-15T17:41:19.000Z | import requests
import sys
def debug():
    """CLI entry point: benchmark a URL with cumulatively added field params.

    Usage: debug <url> [field ...]. With no fields, a single baseline
    benchmark runs; otherwise each field is appended in turn and the URL
    is re-benchmarked with the growing field list.
    """
    url, *fields = sys.argv[1:]
    if fields == []:
        print("")
        print("Baseline Benchmark:")
        print("")
        benchmark(url)
    field_param = []
    for field in fields:
        field_param.append(field)
        print("")
        print(" With fields: %s" % (", ".join(field_param)))
        print("")
        benchmark(url, fields=field_param)
def benchmark(url, **kwargs):
    """Hit *url* repeatedly and report per-query and per-request timings.

    Expects an imago-style JSON payload exposing 'meta', 'results' and a
    'debug' section describing DB queries, prefetched fields and timing.
    """
    total_time = 0
    count = 40
    response = requests.get(url, params=kwargs).json()
    # NOTE: local name `debug` shadows the debug() CLI function above.
    meta, results, debug = [response[x] for x in ['meta', 'results', 'debug']]
    if meta['count'] != len(results):
        print("Meta count != result length!")
        print("")
    # Summarize the SQL queries issued for one request, slowest first.
    connection = debug['connection']
    queries = connection['query']['list']
    print("Made %s queries" % (connection['query']['count']))
    print("Sorted by time:")
    print("")
    for query in sorted(queries, reverse=True, key=lambda x: float(x['time'])):
        sql = query['sql']
        if len(sql) >= 80:
            sql = sql[:80] + "..."  # truncate long SQL for display
        print(" %s: %s" % (query['time'], sql))
    print("")
    print("Prefetched Fields:")
    for field in debug['prefetch_fields']:
        print(" %s" % (field))
    # Time `count` further requests using the server-reported seconds.
    for x in range(count):
        time = requests.get(url, params=kwargs).json().get(
            "debug")['time']['seconds']
        total_time += time
        sys.stdout.write(".")
        sys.stdout.flush()
    print("")
    print("Total time (s): %s" % (total_time))
    print("Per request (s) %s" % (total_time / count))
| 27.12069 | 79 | 0.539097 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 335 | 0.212969 |
920180be669a9b4ba8c22bfaeb59c4b059bb6f59 | 1,005 | py | Python | sympy/diffgeom/tests/test_class_structure.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 8,323 | 2015-01-02T15:51:43.000Z | 2022-03-31T13:13:19.000Z | sympy/diffgeom/tests/test_class_structure.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 15,102 | 2015-01-01T01:33:17.000Z | 2022-03-31T22:53:13.000Z | sympy/diffgeom/tests/test_class_structure.py | shilpiprd/sympy | 556e9c61b31d0d5f101cd56b43e843fbf3bcf121 | [
"BSD-3-Clause"
] | 4,490 | 2015-01-01T17:48:07.000Z | 2022-03-31T17:24:05.000Z | from sympy.diffgeom import Manifold, Patch, CoordSystem, Point
from sympy import symbols, Function
from sympy.testing.pytest import warns_deprecated_sympy
# Shared two-dimensional manifold fixtures used by the tests below.
m = Manifold('m', 2)
p = Patch('p', m)
a, b = symbols('a b')
cs = CoordSystem('cs', p, [a, b])
x, y = symbols('x y')
f = Function('f')
s1, s2 = cs.coord_functions()   # scalar coordinate functions
v1, v2 = cs.base_vectors()      # base vector fields
f1, f2 = cs.base_oneforms()     # base one-forms
def test_point():
    """A symbolic point differs from one with a substituted coordinate."""
    point = Point(cs, [x, y])
    assert point != Point(cs, [2, y])
    #TODO assert point.subs(x, 2) == Point(cs, [2, y])
    #TODO assert point.free_symbols == set([x, y])
def test_subs():
    """subs() works on coord functions, base vectors, one-forms, and mixes."""
    assert s1.subs(s1, s2) == s2
    assert v1.subs(v1, v2) == v2
    assert f1.subs(f1, f2) == f2
    assert (x*f(s1) + y).subs(s1, s2) == x*f(s2) + y
    assert (f(s1)*v1).subs(v1, v2) == f(s1)*v2
    assert (y*f(s1)*f1).subs(f1, f2) == y*f(s1)*f2
def test_deprecated():
    """Building a CoordSystem from name strings warns as deprecated but
    still round-trips through func/args reconstruction."""
    with warns_deprecated_sympy():
        cs_wname = CoordSystem('cs', p, ['a', 'b'])
        assert cs_wname == cs_wname.func(*cs_wname.args)
| 30.454545 | 62 | 0.610945 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 129 | 0.128358 |
920331ec416a3fdec60559ded876c4a34a7bd172 | 489 | py | Python | biconfigs/storages.py | antfu/two-way-configs.py | 8dcf00abf2920d7619da36bbfba33b473656f2a6 | [
"MIT"
] | 2 | 2016-09-28T10:50:25.000Z | 2022-03-24T18:41:18.000Z | biconfigs/storages.py | antfu/two-way-configs.py | 8dcf00abf2920d7619da36bbfba33b473656f2a6 | [
"MIT"
] | 1 | 2016-09-30T20:24:16.000Z | 2019-04-20T12:58:25.000Z | biconfigs/storages.py | antfu/biconfigs | 8dcf00abf2920d7619da36bbfba33b473656f2a6 | [
"MIT"
] | null | null | null | import codecs
# Process-wide backing store for the in-memory storage backend below.
__memory_storage = {}
def file_read(path):
    """Return the full UTF-8 decoded contents of the file at *path*."""
    with codecs.open(path, 'r', 'utf-8') as handle:
        return handle.read()
def file_write(path, text):
    """Write *text* to *path* as UTF-8, replacing any existing content."""
    with codecs.open(path, 'w', 'utf-8') as handle:
        return handle.write(text)
def memory_write(path, data):
    # Store *data* under *path* in the module-level in-memory backend.
    __memory_storage[path] = data
# Registry of storage backends; each maps 'read'/'write' to a callable.
STORAGES = {
    'file': {
        'read': file_read,
        'write': file_write
    },
    'memory': {
        'read': lambda x: __memory_storage[x],
        'write': memory_write
    }
}
| 18.807692 | 46 | 0.572597 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 60 | 0.122699 |
9204509b7b416569750a4002597915c7ee96c306 | 895 | py | Python | tests/test_oauth.py | Dephilia/poaurk | 8482effc35030c316c311dbda8a2e0b2e0754cda | [
"MIT"
] | 1 | 2021-11-25T16:55:16.000Z | 2021-11-25T16:55:16.000Z | tests/test_oauth.py | Dephilia/poaurk | 8482effc35030c316c311dbda8a2e0b2e0754cda | [
"MIT"
] | null | null | null | tests/test_oauth.py | Dephilia/poaurk | 8482effc35030c316c311dbda8a2e0b2e0754cda | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2021 dephilia <dephilia@MacBook-Pro.local>
#
# Distributed under terms of the MIT license.
"""
"""
import unittest
from poaurk import (PlurkAPI, PlurkOAuth)
class TestOauthMethods(unittest.TestCase):
    """Smoke test for PlurkOAuth plus a few str-method sanity checks."""

    def test_class(self):
        # NOTE(review): app key/secret are committed here in plain text;
        # they should be rotated and loaded from the environment instead.
        oauth = PlurkOAuth("z3kiB2tbqrlC", "u8mCwet8BQNjROfUZU8A6BHc1o9rx1AE")
        oauth.authorize()

    def test_upper(self):
        self.assertEqual('foo'.upper(), 'FOO')

    def test_isupper(self):
        self.assertTrue('FOO'.isupper())
        self.assertFalse('Foo'.isupper())

    def test_split(self):
        s = 'hello world'
        self.assertEqual(s.split(), ['hello', 'world'])
        # check that s.split fails when the separator is not a string
        with self.assertRaises(TypeError):
            s.split(2)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 24.189189 | 78 | 0.643575 | 604 | 0.674107 | 0 | 0 | 0 | 0 | 0 | 0 | 340 | 0.379464 |
9204b08a2c869643862f0dd4db1c4a7238352ef3 | 18,040 | py | Python | test/functional/bsv-journal-mempool-reorg-ordering.py | AustEcon/bitcoin-sv | bc5a01a621893b63d7615d1c7dc2245bae47e8bb | [
"OML"
] | 2 | 2021-06-12T10:10:21.000Z | 2021-06-16T00:03:43.000Z | test/functional/bsv-journal-mempool-reorg-ordering.py | MatterPool/bitcoin-sv-1 | c0e0cbd801ec46c0bd5bf6c91903eb943166f279 | [
"OML"
] | null | null | null | test/functional/bsv-journal-mempool-reorg-ordering.py | MatterPool/bitcoin-sv-1 | c0e0cbd801ec46c0bd5bf6c91903eb943166f279 | [
"OML"
] | 2 | 2020-08-20T20:24:42.000Z | 2021-01-21T09:24:56.000Z | #!/usr/bin/env python3
# Copyright (c) 2019 Bitcoin Association
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
'''
Check different scenarios on how reorg affects contents of mempool and journal.
# chain reorg as a set operation on the chains of blocks containing sets of transactions
# (set(old_tip)) + set(mempool)) - set(new_tip) = new_mempool
# 0 ( A + {} ) - {} = A
# 1 ( {} + B ) - {} = B
# 2 ( C1 + C2 ) - {} = C1+C2
# 3 ( D + {} ) - D = {}
# 4 ( {} + E ) - E = {}
# 5 ( F1 + F2 ) - F1 = F2
# 6 ( G1 + G2 ) - G1+G2 = {}
# 7 ( Hx + {} ) - Hy = {}
# 8 ( Ix1 + Ix2 ) - Iy = {}
Where:
- Each letter is a separate (valid) transaction chain
- suffixes `x` and `y` are doublespend variants chains starting at the same UTXO
- suffixes `1` and `2` are first and second part of the same transaction chain
Two mechanisms for forcing a reorg are tested:
- new_tip is made better(longer) than old_tip
- old_tip is invalidated and so the equally good new_tip is chosen.
'''
from time import sleep
import socket
import itertools
import heapq
from test_framework.blocktools import create_block, create_coinbase
from test_framework.cdefs import ONE_GIGABYTE
from test_framework.key import CECKey
from test_framework.mininode import CTransaction, msg_tx, CTxIn, COutPoint, CTxOut, msg_block, msg_tx
from test_framework.script import CScript, SignatureHashForkId, SIGHASH_ALL, SIGHASH_FORKID, OP_CHECKSIG
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import wait_until
# Counter used to derive unique, deterministic private keys.
_cntr = 1
def new_key():
    """Return a fresh CECKey seeded from the monotonically increasing counter."""
    k = CECKey()
    global _cntr
    k.set_secretbytes(_cntr.to_bytes(6, byteorder='big'))
    _cntr += 1
    return k
class UTXO:
    """A spendable output: the transaction, output index, and signing key."""

    def __init__(self, tx, ndx, key):
        self.tx, self.ndx, self.key = tx, ndx, key
# Counter used to give each generated block a distinct timestamp offset.
_cntr2 = 1
def get_tip(connection):
    # Verbose block data (getblock) for the node's current best tip.
    return connection.rpc.getblock(connection.rpc.getbestblockhash())
def get_block_hash(block):
    """Return the hash of *block*, given either an RPC dict or a block object."""
    return block["hash"] if isinstance(block, dict) else block.hash
def knows_of_block(connection, block):
    """Build a poll predicate: does the node know *block* (not necessarily as tip)?"""
    def predicate(*, _block_hash=get_block_hash(block)):
        # Hash is captured once, at predicate-creation time, via the default arg.
        try:
            tmp = connection.rpc.getblock(_block_hash)
            print(f"node knows of block {tmp['hash']} by {_block_hash}")
            assert tmp["hash"] == _block_hash
            return True
        except:
            # bare except: any RPC failure counts as "block unknown"
            print(f"node knows noting about block {_block_hash}")
            return False
    return predicate
def block_is_tip(connection, block):
    """Build a poll predicate: is *block* the node's current best tip?"""
    def predicate(*, _block_hash=get_block_hash(block)):
        ret = connection.rpc.getbestblockhash() == _block_hash
        if ret:
            print(f"node tip is block {_block_hash}")
        return ret
    return predicate
def make_and_send_block_ex(connection, vtx, *, tip=None, wait_for_tip=True):
    """Create and send a block containing *vtx* on top of *tip*.

    When *tip* is None the node's current best tip is extended. Waits
    until the node either adopts the block as tip (*wait_for_tip*) or at
    least knows of it. Returns a (coinbase UTXO, verbose block data) tuple.
    """
    if tip is None:
        tip = get_tip(connection)
    else:
        tip = connection.rpc.getblock(get_block_hash(tip))
    coinbase_key = new_key()
    coinbase_tx = create_coinbase(tip["height"] + 1, coinbase_key.get_pubkey())
    coinbase_tx.rehash()
    global _cntr2
    _cntr2 += 1
    # Bump the timestamp by a unique offset so sibling blocks differ.
    block = create_block(int(tip["hash"], 16), coinbase_tx, tip["time"] + _cntr2)
    if vtx:
        block.vtx.extend(vtx)
        block.hashMerkleRoot = block.calc_merkle_root()
    block.calc_sha256()
    block.solve()
    msg = msg_block(block)
    connection.send_message(msg)
    if wait_for_tip:
        wait_until(block_is_tip(connection, block), timeout=15)
    else:
        wait_until(knows_of_block(connection, block), timeout=15)
    return UTXO(coinbase_tx, 0, coinbase_key), connection.rpc.getblock(get_block_hash(block))
def make_and_send_block(connection, vtx, *, tip=None, wait_for_tip=True):
    """Like make_and_send_block_ex, but return only the coinbase UTXO.

    Fix: ``tip`` and ``wait_for_tip`` are keyword-only parameters of
    make_and_send_block_ex, so the previous positional pass-through
    always raised TypeError; forward them as keywords and unwrap the
    (utxo, block-data) tuple so callers get just the UTXO.
    """
    utxo, _ = make_and_send_block_ex(connection, vtx, tip=tip, wait_for_tip=wait_for_tip)
    return utxo
def create_tx(utxos, n_outputs, fee_delta=0):
    """Spend *utxos* into *n_outputs* equal P2PK outputs and sign every input.

    Returns (tx, list of new UTXO objects). *fee_delta* increases the
    implicit fee by shrinking each output.
    """
    total_input = 0
    tx = CTransaction()
    for utxo in utxos:
        tx.vin.append(CTxIn(COutPoint(utxo.tx.sha256, utxo.ndx), b"", 0xffffffff))
        total_input += utxo.tx.vout[utxo.ndx].nValue
    # Rough per-output amount; the remainder implicitly becomes the fee.
    amount_per_output = total_input // n_outputs - len(utxos)*300 - n_outputs*200 - 100 - fee_delta
    new_utxos = []
    for i in range(n_outputs):
        k = new_key()
        new_utxos.append(UTXO(tx, i, k))
        tx.vout.append(CTxOut(amount_per_output, CScript([k.get_pubkey(), OP_CHECKSIG])))
    # Sign each input with the key of the UTXO it spends (FORKID scheme).
    for input_ndx, (utxo, input) in enumerate(zip(utxos, tx.vin)):
        sighash = SignatureHashForkId(utxo.tx.vout[utxo.ndx].scriptPubKey, tx, input_ndx, SIGHASH_ALL | SIGHASH_FORKID, utxo.tx.vout[utxo.ndx].nValue)
        input.scriptSig = CScript([utxo.key.sign(sighash) + bytes(bytearray([SIGHASH_ALL | SIGHASH_FORKID]))])
    tx.rehash()
    return tx, new_utxos
def split(utxos, n_inputs, n_outputs, fee_delta=0):
    """Eagerly drain split_iter; return (transactions, new_utxos) lists."""
    new_utxos = []
    transactions = []
    for _ in split_iter(utxos, n_inputs, n_outputs, new_utxos, transactions, fee_delta):
        pass
    return transactions, new_utxos
def split_iter(utxos, n_inputs, n_outputs, new_utxos=None, transactions=None, fee_delta=0):
    """Yield (tx, outputs) pairs spending *utxos* in groups of *n_inputs*.

    Optionally accumulates results into caller-supplied *new_utxos* and
    *transactions* lists as a side effect.
    """
    for ndx in range(0, len(utxos), n_inputs):
        tx, xx = create_tx(utxos[ndx : ndx+n_inputs], n_outputs, fee_delta)
        if new_utxos is not None:
            new_utxos.extend(xx)
        if transactions is not None:
            transactions.append(tx)
        yield tx, xx
def make_tx_chain(utxo, chain_length, fee_delta=0):
    """Build a linear chain of *chain_length* txs, each spending the previous one."""
    def gen():
        utxos = [utxo]
        for a in range(chain_length):
            tx, utxos = create_tx(utxos, 1, fee_delta=fee_delta)
            yield tx
    return list(gen())
def chop(x, n=2):
    """Chop sequence into n approximately equal slices

    >>> chop(range(10), n=3)
    [[0, 1, 2], [3, 4, 5, 6], [7, 8, 9]]

    >>> chop(range(3), n=10)
    [[], [0], [], [], [1], [], [], [], [2], []]
    """
    items = list(x)
    if n < 2:
        return [items]
    # Accumulate a float cursor and round at each boundary, exactly as a
    # fractional stepping scheme; the final piece takes whatever remains.
    step = len(items) / n
    pieces = []
    cursor = 0
    for _ in range(n - 1):
        pieces.append(items[round(cursor):round(cursor + step)])
        cursor += step
    pieces.append(items[round(cursor):])
    return pieces
def splice(*iters):
    """Interleave several iterables round-robin, skipping exhausted ones.

    >>> print(*splice('abc', 'de', 'f'))
    a d f b e c
    """
    sentinel = object()
    for bundle in itertools.zip_longest(*iters, fillvalue=sentinel):
        for item in bundle:
            if item is not sentinel:
                yield item
def make_blocks_from(conn, root_block, nblocks, *txs_lists, wait_for_tip=True):
    """Mine *nblocks* blocks on top of *root_block*, spreading the given
    transaction lists (interleaved via splice, split via chop) across them.
    Returns the list of verbose block data dicts, oldest first."""
    def gen(root_block, nblocks):
        for i, txs in enumerate(chop(splice(*txs_lists), n=nblocks), start=1):
            _, root_block = make_and_send_block_ex(conn, txs, tip=root_block, wait_for_tip=wait_for_tip)
            yield root_block
    return list(gen(root_block, nblocks))
def submit_to_mempool(conn, *txs_lists):
    """Send all given transactions and wait until the mempool holds them all."""
    txs = list(splice(*txs_lists))
    expected_mempool_size = conn.rpc.getmempoolinfo()["size"] + len(txs)
    for tx in txs:
        conn.send_message(msg_tx(tx))
    # All planned transactions should be accepted into the mempool
    wait_until(lambda: conn.rpc.getmempoolinfo()["size"] == expected_mempool_size)
class property_dict(dict):
    """dict whose entries can also be read and written as attributes."""

    def __getattr__(self, name):
        return self[name]

    def __setattr__(self, name, value):
        self[name] = value
def tx_ids(txs):
    """Map transactions to their hash strings; plain strings pass through."""
    ids = []
    for tx in txs:
        ids.append(tx if isinstance(tx, str) else tx.hash)
    return ids
class tx_set_context(dict):
    """Mapping of subset name -> ordered list of transaction id strings."""

    def __init__(self, context={}, **subsets):
        # *context* is copied, never mutated, so the mutable default is safe.
        merged = dict(context)
        merged.update(subsets)
        super().__init__({name: tx_ids(members) for name, members in merged.items()})
class tx_set(set):
    """A named set of transaction ids that can explain its diff to another set.

    ``explain`` produces a human-readable account of which named subsets
    (from a tx_set_context) are present, missing or unexpected in *other*,
    rendering contiguous runs of subset indices as slice expressions.
    """

    def __init__(self, _members=(), *, _name=None):
        # Inherit the name from another tx_set unless one is given explicitly.
        if isinstance(_members, tx_set):
            _name = _name if _name is not None else _members._name
        self._name = _name if _name is not None else 'set'
        super().__init__(tx_ids(_members))

    def explain(self, other, *, context):
        """Describe how *other* differs from self in terms of *context* subsets."""
        if not isinstance(other, tx_set):
            other = tx_set(other)
        if not isinstance(context, tx_set_context):
            context = tx_set_context(context)
        ret = ""
        explained = set()
        # 1) For each named subset we expect, report whether *other* has any of it.
        for n, v in context.items():
            if not self.intersection(v):
                continue
            if other.intersection(v):
                ret += self._explain_range(n, v, other)
                ret += " "
            else:
                ret += f"no {n} "
            explained.update(other.intersection(v))
        # 2) Expected ids that never showed up in *other*.
        missing = self.difference(explained)
        if missing:
            if ret:
                ret += "and "
            ret += f"missing from {self._name} are "
            for n, v in context.items():
                if not self.intersection(v):
                    continue
                if missing.intersection(v):
                    ret += self._explain_range(n, v, missing)
                    ret += " "
                missing.difference_update(v)
            if missing:
                ret += ", ".join(sorted(missing))
                ret += " "
        # 3) Ids present in *other* that self never expected.
        unexpected = other.difference(self)
        if unexpected:
            if ret:
                ret += "and "
            ret += "unexpected "
            for n, v in context.items():
                if unexpected.intersection(v):
                    ret += self._explain_range(n, v, unexpected)
                    ret += " "
                unexpected.difference_update(v)
            if unexpected:
                ret += ", ".join(sorted(unexpected))
                ret += " "
        return f"{other._name} is {ret}"

    def _explain_range(self, n, v, elements):
        """Render the positions of *elements* within ordered subset *v* as
        slice expressions like ``n[2:5]`` or single indices like ``n[4]``."""
        def find_slices():
            # Merge consecutive indices into contiguous slice objects.
            last = None
            for i in sorted(map(v.index, elements.intersection(v))):
                if last is None:
                    last = slice(i, i+1)
                elif last.stop == i:
                    last = slice(last.start, i+1)
                else:
                    yield last
                    # Fix: start a new run at the current index instead of
                    # discarding it (previously `last = None`, which silently
                    # dropped the first index after every gap from the report).
                    last = slice(i, i+1)
            if last is not None:
                yield last
        def show_slices(slices):
            for s in slices:
                start = str(s.start) if s.start > 0 else ""
                stop = str(s.stop) if s.start > 0 or s.stop < len(v) else ""
                yield f"{n}[{start}:{stop}]" if s.start+1 != s.stop else f"{n}[{s.start}]"
        return " ".join(show_slices(find_slices()))
# Ad-hoc module-level smoke check of tx_set.explain, left disabled below.
c = property_dict(A="abc", B="def", C="ghi", Z="xyz")
e = tx_set(c.A + c.B + c.C, _name="'e'")
a = tx_set("abcdegixyq", _name="'a'")
# assert e == a, e.explain(a, context=c)
class ReorgTests(BitcoinTestFramework):
    """Functional test: mempool/journal contents across chain reorgs.

    Implements the scenario table from the module docstring; each reorg
    strategy (invalidating the old tip vs. out-mining it) is run once on
    its own funding UTXO.
    """

    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1

    def setup_network(self):
        self.setup_nodes()

    def setup_nodes(self):
        self.add_nodes(self.num_nodes)

    def run_test(self):
        """Prepare mature funds, then run check_reorg_cases per strategy."""
        with self.run_node_with_connections("Xxxxxxxxxxxxx",
                                            0,
                                            ["-checkmempool=1",
                                             '-whitelist=127.0.0.1',
                                             '-genesisactivationheight=1',
                                             '-jbafillafternewblock=1'
                                             ],
                                            number_of_connections=1) as (conn,):
            self.log.info("making coinbase")
            fee_delta = 10
            utxo, _ = make_and_send_block_ex(conn, [])
            # Mature the coinbase so it becomes spendable.
            conn.rpc.generate(110)

            # we will mine two competing chains, old and new
            # the old one will be one block longer than the new
            # We will use several strategies to switch the chains
            def reorg_by_invalidateblock(conn, old_chain, new_chain):
                """Invalidate last two blocks of the old chain to make the new chain longer"""
                conn.rpc.invalidateblock(old_chain[-2]["hash"])
                wait_until(lambda: conn.rpc.getbestblockhash() == new_chain[-1]["hash"], timeout=10)
                return new_chain[-1]

            def reorg_by_mining(conn, old_chain, new_chain):
                """Mine two more blocks on the new chain to make the new chain longer"""
                more_chain = make_blocks_from(conn, new_chain[-1], 1, wait_for_tip=False)
                more_chain = make_blocks_from(conn, more_chain[-1], 1, wait_for_tip=True)
                return more_chain[-1]

            reorg_strategies = [reorg_by_invalidateblock, reorg_by_mining]

            # One funding UTXO per strategy, all confirmed in a single block.
            txs, case_utxos = split([utxo], 1, len(reorg_strategies), fee_delta=fee_delta)
            _, root_block_data = make_and_send_block_ex(conn, txs)

            for strategy, case_utxo in zip(reorg_strategies, case_utxos):
                self.check_reorg_cases(conn, root_block_data, strategy, case_utxo=case_utxo, fee_delta=fee_delta)

    def check_reorg_cases(self, conn, root_block_data, instigate_reorg, case_utxo, fee_delta):
        """Build the A..I transaction chains, force a reorg via *instigate_reorg*,
        then verify the surviving mempool matches the expected set."""
        self.log.info("Check reorg cases with %s", instigate_reorg.__name__)
        n_cases = 9
        txs, new_utxos = split([case_utxo], 1, n_cases, fee_delta=fee_delta)
        utxo, root_block_data = make_and_send_block_ex(conn, txs)
        # stay below 25 chain limit as the whole thing may end up in the mempool
        tx_chain_depth = 24
        # see docstring above
        self.log.info("preparing transactions")
        chains = property_dict()
        chains.A = make_tx_chain(new_utxos[0], tx_chain_depth, fee_delta=fee_delta)
        chains.B = make_tx_chain(new_utxos[1], tx_chain_depth, fee_delta=fee_delta)
        chains.C1, chains.C2 = chop(make_tx_chain(new_utxos[2], tx_chain_depth, fee_delta=fee_delta))
        chains.D = make_tx_chain(new_utxos[3], tx_chain_depth, fee_delta=fee_delta)
        chains.E = make_tx_chain(new_utxos[4], tx_chain_depth, fee_delta=fee_delta)
        chains.F1, chains.F2 = chop(make_tx_chain(new_utxos[5], tx_chain_depth, fee_delta=fee_delta))
        chains.G1, chains.G2 = chop(make_tx_chain(new_utxos[6], tx_chain_depth, fee_delta=fee_delta))
        # Hx/Hy and Ix/Iy are double-spend variants built from the same UTXO.
        chains.Hx = make_tx_chain(new_utxos[7], tx_chain_depth, fee_delta=fee_delta)
        chains.Hy = make_tx_chain(new_utxos[7], tx_chain_depth, fee_delta=fee_delta)
        chains.Ix1, chains.Ix2 = chop(make_tx_chain(new_utxos[8], tx_chain_depth, fee_delta=fee_delta))
        chains.Iy = make_tx_chain(new_utxos[8], tx_chain_depth, fee_delta=fee_delta)

        nblocks = 5
        self.log.info("preparing chain to be invalidated")
        chain_to_be_invalidated = make_blocks_from(conn, root_block_data, nblocks + 1,
                                                   chains.A,
                                                   chains.C1,
                                                   chains.D,
                                                   chains.F1,
                                                   chains.G1,
                                                   chains.Hx,
                                                   chains.Ix1,
                                                   wait_for_tip=True)

        self.log.info("submitting to mempool")
        submit_to_mempool(conn,
                          chains.B,
                          chains.C2,
                          chains.E,
                          chains.F2,
                          chains.G2,
                          chains.Ix2)

        self.log.info("preparing chain to be activated")
        chain_to_activate = make_blocks_from(conn, root_block_data, nblocks,
                                             chains.D,
                                             chains.E,
                                             chains.F1,
                                             chains.G1 + chains.G2,
                                             chains.Hy,
                                             chains.Iy,
                                             wait_for_tip=False)

        self.log.info("check tip before reorg")
        expected_tip = chain_to_be_invalidated[-1]
        actual_tip = get_tip(conn)
        assert expected_tip["hash"] == actual_tip["hash"]

        self.log.info("instigating a reorg by %s", instigate_reorg.__name__)
        expected_tip = instigate_reorg(conn, chain_to_be_invalidated, chain_to_activate)

        self.log.info("check tip after reorg")
        actual_tip = get_tip(conn)
        assert expected_tip["hash"] == actual_tip["hash"]
        conn.cb.sync_with_ping()

        mempool_txs = []
        # make sure that JBA has catched up
        for i in range(5):
            self.log.info("mining the mempool")
            conn.rpc.generate(1)
            tip = get_tip(conn)
            # Skip the coinbase (index 0); the rest came from the mempool.
            mempool_txs.extend(tip["tx"][1:])
            mempool_size = conn.rpc.getmempoolinfo()['size']
            if not mempool_size:
                break # everything is mined
            self.log.info("give JBA some time to do it's thing")
            sleep(0.1)
        else:
            assert False, "Mempool is not empty after {i} blocks, {n} transactions remaining.".format(
                i=i, n=mempool_size)

        actual_mempool = tx_set(mempool_txs, _name='actual_mempool')
        expected_mempool = tx_set(chains.A +
                                  chains.B +
                                  chains.C1 +
                                  chains.C2 +
                                  chains.F2,
                                  _name='expected_mempool')
        assert expected_mempool == actual_mempool, expected_mempool.explain(actual_mempool, context=chains)
# Allow running this functional test directly as a script.
if __name__ == '__main__':
    ReorgTests().main()
| 37.978947 | 150 | 0.561696 | 10,290 | 0.570399 | 2,258 | 0.125166 | 0 | 0 | 0 | 0 | 3,212 | 0.178049 |
92052e2fb1619d6d194ae1f403d4ea778640c5a8 | 394 | py | Python | datacite_rest/__init__.py | gu-eresearch/datacite-rest | 2569afc9df2a2380e4c001b881b28dd8288441ae | [
"MIT"
] | null | null | null | datacite_rest/__init__.py | gu-eresearch/datacite-rest | 2569afc9df2a2380e4c001b881b28dd8288441ae | [
"MIT"
] | null | null | null | datacite_rest/__init__.py | gu-eresearch/datacite-rest | 2569afc9df2a2380e4c001b881b28dd8288441ae | [
"MIT"
] | 1 | 2021-06-01T16:11:07.000Z | 2021-06-01T16:11:07.000Z | """ root module with metadata """
# Distribution metadata for the datacite-rest package.
__title__ = 'datacite-rest'
__version__ = '0.0.1-dev0'
__author__ = 'Gary Burgmann'
__author_email__ = 'g.burgmann@griffith.edu.au'
__description__ = 'a package for managing dois'
__license__ = 'MIT'
# Best-effort re-export of the main client class; deliberately swallowed so
# importing this module for metadata alone (e.g. from setup.py) never fails.
try:
    from .datacite_rest import DataCiteREST # noqa
except Exception:
    # preserve import here but stops setup.py breaking due to dependencies
    pass
| 28.142857 | 74 | 0.741117 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.540609 |
9209213a5ea6e0e21690051a33ad712a4ee48839 | 5,778 | py | Python | audio_utils/common/feature_transforms.py | SarthakYadav/audio-data-utils | 2b9c1a0472c9e0521cb02ac142e0a26d73314238 | [
"MIT"
] | null | null | null | audio_utils/common/feature_transforms.py | SarthakYadav/audio-data-utils | 2b9c1a0472c9e0521cb02ac142e0a26d73314238 | [
"MIT"
] | null | null | null | audio_utils/common/feature_transforms.py | SarthakYadav/audio-data-utils | 2b9c1a0472c9e0521cb02ac142e0a26d73314238 | [
"MIT"
] | null | null | null | import torch
import torchaudio
import numpy as np
from torch.nn.functional import normalize
from audio_utils.common.utilities import _check_transform_input
class BaseAudioParser(object):
    """Base class for audio parsers; records the expected input rank.

    ``desired_dims`` is 3 for 'after_batch' (batched input) and 2 for
    'per_instance' — presumably the tensor rank that
    _check_transform_input enforces; confirm against
    audio_utils.common.utilities.
    """

    def __init__(self, mode="after_batch"):
        super().__init__()
        assert mode in ['after_batch', "per_instance"]
        if mode == "after_batch":
            self.desired_dims = 3
        else:
            self.desired_dims = 2

    def check_sample(self, audio_sample):
        # Validate/coerce the input to the expected dimensionality.
        return _check_transform_input(audio_sample, desired_dims=self.desired_dims)

    def __call__(self, audio):
        raise NotImplementedError("Abstract method called")
class SpectrogramParser(BaseAudioParser):
    """Compute a complex STFT spectrogram via torchaudio.

    Always returns a complex tensor (power=None, normalized=False), which
    is what SpecAugment-style transforms downstream expect; magnitude /
    power conversion is delegated to SpectrogramPostProcess.
    """

    def __init__(self,
                 window_length=400,
                 hop_length=160,
                 n_fft=400,
                 center=True,
                 window_fn=torch.hann_window,
                 pad=0,
                 pad_mode="reflect",
                 mode="after_batch"):
        super(SpectrogramParser, self).__init__(mode)
        self.n_fft = n_fft
        self.hop_length = hop_length
        self.window_length = window_length
        self.center = center
        self.return_complex = True
        # self.window_fn = window_fn
        self.window = window_fn(window_length)
        self.pad = pad
        self.pad_mode = pad_mode

    # always returns complex
    # therefore, output will be complex tensor, which is desired for SpecAugment
    # Hence, add a transform on top to convert to absolute value and desired power
    def __call__(self, batch):
        """
        :param batch: float array/tensor of shape (N, T) or (T,) for a single input
        :return: tensor of dtype complex
        """
        batch = self.check_sample(batch)
        batch = torchaudio.functional.spectrogram(
            batch,
            self.pad,
            self.window,
            self.n_fft,
            self.hop_length,
            self.window_length,
            power=None,  # power other than None enforces real valued spec
            normalized=False,  # normalization enforces real valued spec
            center=self.center,
            pad_mode=self.pad_mode,
            onesided=True,
            return_complex=True
        )
        return batch
class SpectrogramPostProcess:
    """Turn a complex spectrogram into magnitude/power, with optional
    window normalization and log compression.

    With ``mel_spec_override=True`` log compression is forced off so a
    downstream MelScale step can apply it after the mel projection.
    NOTE(review): the "log_compression is set to True" warning below keys
    off the *parameter*, so it still prints even when the override just
    disabled compression.
    """

    def __init__(self,
                 window_length=400,
                 window_fn=torch.hann_window,
                 power=2,
                 normalize=False,
                 log_compress=True,
                 mode="after_batch",
                 mel_spec_override=False):
        super(SpectrogramPostProcess, self).__init__()
        self.power = power
        self.normalize = normalize
        self.window = window_fn(window_length)
        self.log_compress = log_compress
        self.mode = mode
        if mel_spec_override:
            self.log_compress = False
            print("mel_spec_override flag is set, forcing log_compress to false")
        if log_compress:
            print("log_compression is set to True in SpectrogramPostProcess. If using MelScale down the line, disable it")

    def __call__(self, batch):
        """
        :param batch: float tensor of shape (N, F, T)
        :return: processed (real-valued) tensor; leading batch dim is
            squeezed away in 'per_instance' mode
        """
        if self.normalize:
            # Compensate for the analysis window's energy.
            batch /= self.window.pow(2.).sum().sqrt()
        if self.power:
            if self.power == 1.0:
                batch = batch.abs()
            else:
                batch = batch.abs().pow(self.power)
        if self.log_compress:
            # Clamp before log to avoid -inf on zero bins.
            batch = torch.clamp(batch, min=1e-8, max=1e8)
            batch = torch.log(batch)
        if self.mode == "per_instance":
            batch = batch.squeeze(0)
        return batch
class ToMelScale(BaseAudioParser):
    """Project a (power) spectrogram onto a log-mel scale.

    Keeping this as a separate step after SpectrogramParser/PostProcess
    allows time-stretch style augmentations on the complex spectrogram
    before the mel projection.
    """

    def __init__(self,
                 sample_rate=16000,
                 # window_length=400,
                 hop_length=160,
                 n_fft=1024,
                 n_mels=64,
                 fmin=60.0,
                 fmax=7800.0,
                 norm=None,
                 # center=True,
                 mel_scale="htk"):
        super(ToMelScale, self).__init__()
        self.sample_rate = sample_rate
        # self.window_length = window_length
        self.hop_length = hop_length
        self.n_fft = n_fft
        self.n_mels = n_mels
        self.fmin = fmin
        self.fmax = fmax
        # self.center = center
        # n_stft = n_fft // 2 + 1 frequency bins (one-sided spectrum).
        self.mel_scale = torchaudio.transforms.MelScale(
            self.n_mels,
            self.sample_rate,
            self.fmin,
            self.fmax,
            self.n_fft // 2 + 1,
            norm,
            mel_scale
        )

    def __call__(self, batch):
        """
        Accepts output of SpectrogramParser -> ... -> SpectrogramPostProcess and converts it to MelScale
        This pipeline allows us to use torchaudio.transforms.TimeStretching

        :param batch: real-valued spectrogram tensor
        :return: log-compressed mel spectrogram
        """
        batch = self.mel_scale(batch)
        # Clamp before log to avoid -inf on zero bins.
        batch = torch.clamp(batch, min=1e-8, max=1e8)
        batch = torch.log(batch)
        return batch
class RawAudioParser(BaseAudioParser):
    """Convert a numpy waveform to a float32 torch tensor of shape (1, T).

    :param normalize_waveform: whether to L2-normalize the waveform along
        the last axis. Note: despite the original note claiming N(0,1),
        ``torch.nn.functional.normalize(audio, 2, dim=-1)`` performs
        p=2 vector normalization, not zero-mean/unit-variance scaling.
    """

    def __init__(self, normalize_waveform=False):
        super().__init__()
        self.normalize_waveform = normalize_waveform
        if self.normalize_waveform:
            print("ATTENTION!!! Normalizing waveform")

    def normalize_sample(self, audio):
        # L2 (p=2) normalization along the time axis.
        return normalize(audio, 2, dim=-1)

    def __call__(self, audio):
        # Cast to float32 and add a leading channel dim; second element of
        # the returned tuple is a placeholder (no auxiliary output).
        output = torch.from_numpy(audio.astype("float32")).float()
        if self.normalize_waveform:
            output = self.normalize_sample(output)
        output = output.unsqueeze(0)
        return output, None
| 32.1 | 122 | 0.573901 | 5,607 | 0.970405 | 0 | 0 | 0 | 0 | 0 | 0 | 1,283 | 0.222049 |
920aca29cb5a19cdc1c9d17f62a1e4f246af7860 | 112 | py | Python | datrie/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 130 | 2015-07-28T03:41:21.000Z | 2022-03-16T03:07:41.000Z | datrie/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 119 | 2015-08-01T00:54:06.000Z | 2021-01-05T13:00:46.000Z | datrie/run_test.py | nikicc/anaconda-recipes | 9c611a5854bf41bbc5e7ed9853dc71c0851a62ef | [
"BSD-3-Clause"
] | 72 | 2015-07-29T02:35:56.000Z | 2022-02-26T14:31:15.000Z | import string
import datrie
# Smoke test: a trie keyed on lowercase ASCII stores and finds an entry.
trie = datrie.Trie(string.ascii_lowercase)
trie[u'foo'] = 5
assert u'foo' in trie
| 14 | 42 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 12 | 0.107143 |
920ae2c4b1ffab09874d3603b23d3df7ab55beeb | 962 | py | Python | projects/models.py | plaf2000/webspec | 487ccccff088ddbda0e5e475aaad167a01f4aab2 | [
"MIT"
] | null | null | null | projects/models.py | plaf2000/webspec | 487ccccff088ddbda0e5e475aaad167a01f4aab2 | [
"MIT"
] | null | null | null | projects/models.py | plaf2000/webspec | 487ccccff088ddbda0e5e475aaad167a01f4aab2 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
class Project(models.Model):
    """A user-owned analysis project with FFT view/processing settings."""

    # presumably high/low frequency bounds in Hz (hf default 18 kHz) — confirm
    hf = models.PositiveIntegerField(default=18000)
    lf = models.PositiveIntegerField(default=0)
    # FFT sizes, separately configurable for viewing vs. project processing.
    nfft_view = models.PositiveIntegerField(default=2048)
    nfft_project = models.PositiveIntegerField(default=2048)
    # FFT window lengths, same view/project split.
    fft_window_view = models.PositiveIntegerField(default=2048)
    fft_window_project = models.PositiveIntegerField(default=2048)
    title = models.CharField(max_length=50)
    description = models.TextField(blank=True)
    # Audit trail: creation / last-edit timestamps and the users responsible.
    # PROTECT prevents deleting a User who still owns or edited projects.
    created = models.DateTimeField(auto_now_add=True)
    created_user = models.ForeignKey(User,on_delete=models.PROTECT,related_name='created_user')
    last_edit = models.DateTimeField(auto_now=True)
    last_edit_user = models.ForeignKey(User,on_delete=models.PROTECT,related_name='last_edit_user')
| 56.588235 | 104 | 0.697505 | 883 | 0.917879 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.031185 |
920c06c03c25fe4afade1bef0ccab25a44b158a4 | 764 | py | Python | qiskit/aqua/algorithms/education/__init__.py | hushaohan/aqua | 8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d | [
"Apache-2.0"
] | 2 | 2020-06-29T16:08:12.000Z | 2020-08-07T22:42:13.000Z | qiskit/aqua/algorithms/education/__init__.py | hushaohan/aqua | 8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d | [
"Apache-2.0"
] | null | null | null | qiskit/aqua/algorithms/education/__init__.py | hushaohan/aqua | 8512bc6ce246a8b3cca1e5edb1703b6885aa7c5d | [
"Apache-2.0"
] | 1 | 2022-01-25T07:09:10.000Z | 2022-01-25T07:09:10.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Education Package """
from .eoh import EOH
from .simon import Simon
from .deutsch_jozsa import DeutschJozsa
from .bernstein_vazirani import BernsteinVazirani
# Public API of the education package.
__all__ = ['EOH',
           'Simon',
           'DeutschJozsa',
           'BernsteinVazirani']
| 29.384615 | 77 | 0.717277 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 559 | 0.731675 |
920f5ebefa994608aeedc97909ba734c752a93a4 | 3,950 | py | Python | tests/vision/metrics/vqa_test.py | shunk031/allennlp-models | d37c5fadeef9326808a84dda0bcfd210a078d6b1 | [
"Apache-2.0"
] | 402 | 2020-03-11T22:58:35.000Z | 2022-03-29T09:05:27.000Z | tests/vision/metrics/vqa_test.py | staceywhitmore-inl/allennlp-models | 1e89d5e51cb45f3e77a48d4983bf980088334fac | [
"Apache-2.0"
] | 116 | 2020-03-11T01:26:57.000Z | 2022-03-25T13:03:56.000Z | tests/vision/metrics/vqa_test.py | staceywhitmore-inl/allennlp-models | 1e89d5e51cb45f3e77a48d4983bf980088334fac | [
"Apache-2.0"
] | 140 | 2020-03-11T00:51:35.000Z | 2022-03-29T09:05:36.000Z | from typing import Any, Dict, List, Tuple, Union
import pytest
import torch
from allennlp.common.testing import (
AllenNlpTestCase,
multi_device,
global_distributed_metric,
run_distributed_test,
)
from allennlp_models.vision import VqaMeasure
class VqaMeasureTest(AllenNlpTestCase):
    """Unit tests for the VqaMeasure metric: single-process scoring,
    accumulation/reset semantics, and distributed aggregation."""

    @multi_device
    def test_vqa(self, device: str):
        """One batch of two instances; only instance 0 predicts its label."""
        vqa = VqaMeasure()
        logits = torch.tensor(
            [[0.35, 0.25, 0.1, 0.1, 0.2], [0.1, 0.6, 0.1, 0.2, 0.0]], device=device
        )
        labels = torch.tensor([[0], [3]], device=device)
        label_weights = torch.tensor([[1 / 3], [2 / 3]], device=device)
        vqa(logits, labels, label_weights)
        vqa_score = vqa.get_metric()["score"]
        # Only the first instance is correct, weighted 1/3, over 2 instances.
        assert vqa_score == pytest.approx((1 / 3) / 2)

    @multi_device
    def test_vqa_accumulates_and_resets_correctly(self, device: str):
        """Score accumulates across calls and reset=True clears the state."""
        vqa = VqaMeasure()
        logits = torch.tensor(
            [[0.35, 0.25, 0.1, 0.1, 0.2], [0.1, 0.6, 0.1, 0.2, 0.0]], device=device
        )
        labels = torch.tensor([[0], [3]], device=device)
        labels2 = torch.tensor([[4], [4]], device=device)
        label_weights = torch.tensor([[1 / 3], [2 / 3]], device=device)

        vqa(logits, labels, label_weights)
        vqa(logits, labels, label_weights)
        vqa(logits, labels2, label_weights)
        vqa(logits, labels2, label_weights)

        vqa_score = vqa.get_metric(reset=True)["score"]
        assert vqa_score == pytest.approx((1 / 3 + 1 / 3 + 0 + 0) / 8)

        # After reset, a single batch is scored from scratch.
        vqa(logits, labels, label_weights)
        vqa_score = vqa.get_metric(reset=True)["score"]
        assert vqa_score == pytest.approx((1 / 3) / 2)

    @multi_device
    def test_does_not_divide_by_zero_with_no_count(self, device: str):
        """Reading the metric before any update must not raise."""
        vqa = VqaMeasure()
        assert vqa.get_metric()["score"] == pytest.approx(0.0)

    def test_distributed_accuracy(self):
        logits = [
            torch.tensor([[0.35, 0.25, 0.1, 0.1, 0.2]]),
            torch.tensor([[0.1, 0.6, 0.1, 0.2, 0.0]]),
        ]
        labels = [torch.tensor([[0]]), torch.tensor([[3]])]
        label_weights = [torch.tensor([[1 / 3]]), torch.tensor([[2 / 3]])]
        metric_kwargs = {"logits": logits, "labels": labels, "label_weights": label_weights}
        desired_accuracy = {"score": (1 / 3) / 2}
        run_distributed_test(
            [-1, -1],
            global_distributed_metric,
            VqaMeasure(),
            metric_kwargs,
            desired_accuracy,
            exact=False,
        )

    def test_distributed_accuracy_unequal_batches(self):
        """Workers receive batches of different sizes; score still aggregates."""
        logits = [
            torch.tensor([[0.35, 0.25, 0.1, 0.1, 0.2], [0.35, 0.25, 0.1, 0.1, 0.2]]),
            torch.tensor([[0.1, 0.6, 0.1, 0.2, 0.0]]),
        ]
        labels = [torch.tensor([[0], [0]]), torch.tensor([[3]])]
        label_weights = [torch.tensor([[1], [1]]), torch.tensor([[1 / 3]])]
        metric_kwargs = {"logits": logits, "labels": labels, "label_weights": label_weights}
        desired_accuracy = {"score": (1 + 1 + 0) / 3}
        run_distributed_test(
            [-1, -1],
            global_distributed_metric,
            VqaMeasure(),
            metric_kwargs,
            desired_accuracy,
            exact=False,
        )

    def test_multiple_distributed_runs(self):
        """Repeated distributed runs must stay exact (no state leakage)."""
        logits = [
            torch.tensor([[0.35, 0.25, 0.1, 0.1, 0.2]]),
            torch.tensor([[0.1, 0.6, 0.1, 0.2, 0.0]]),
        ]
        labels = [torch.tensor([[0]]), torch.tensor([[3]])]
        label_weights = [torch.tensor([[1 / 3]]), torch.tensor([[2 / 3]])]
        metric_kwargs = {"logits": logits, "labels": labels, "label_weights": label_weights}
        desired_accuracy = {"score": (1 / 3) / 2}
        run_distributed_test(
            [-1, -1],
            global_distributed_metric,
            VqaMeasure(),
            metric_kwargs,
            desired_accuracy,
            exact=True,
            number_of_runs=200,
        )
| 36.238532 | 92 | 0.552911 | 3,686 | 0.933165 | 0 | 0 | 1,529 | 0.387089 | 0 | 0 | 142 | 0.035949 |
9210cd1a83170ca2842ecbe0ae9f1b20d789338b | 219 | py | Python | compare_two_values.py | jenildesai25/Visa_interview | 9077d01e122267e5708b12293c8cb6d9801cdc46 | [
"MIT"
] | null | null | null | compare_two_values.py | jenildesai25/Visa_interview | 9077d01e122267e5708b12293c8cb6d9801cdc46 | [
"MIT"
] | null | null | null | compare_two_values.py | jenildesai25/Visa_interview | 9077d01e122267e5708b12293c8cb6d9801cdc46 | [
"MIT"
] | null | null | null | VISA full time master's MCQ.
def func(a, b):
    """Return the greatest common divisor of ``a`` and ``b``.

    Uses the modulo form of Euclid's algorithm instead of the original
    repeated-subtraction loop: it runs in O(log(min(a, b))) steps and,
    unlike the subtraction loop, terminates when either argument is zero
    (e.g. func(0, 5) previously looped forever).
    """
    x = a
    y = b
    while y != 0:
        x, y = y, x % y
    # abs() keeps the result non-negative; gcd(0, 0) is still 0,
    # matching the original's `return x or y` for that case.
    return abs(x)
print(func(2437, 875))
| 15.642857 | 29 | 0.374429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9210cf8d56fb443eb25582f2f85f87ed65726ed7 | 36,041 | py | Python | pzflow/flow.py | jfcrenshaw/pzflow | ed1a7082ee3038794632d59864961da381d2523c | [
"MIT"
] | 26 | 2021-02-01T22:10:04.000Z | 2022-03-18T14:54:36.000Z | pzflow/flow.py | jfcrenshaw/pzflow | ed1a7082ee3038794632d59864961da381d2523c | [
"MIT"
] | 48 | 2021-01-12T07:48:10.000Z | 2022-03-18T04:45:30.000Z | pzflow/flow.py | jfcrenshaw/pzflow | ed1a7082ee3038794632d59864961da381d2523c | [
"MIT"
] | 8 | 2021-02-03T06:21:45.000Z | 2022-01-29T17:15:47.000Z | import itertools
from typing import Any, Callable, Sequence, Tuple
import dill as pickle
import jax.numpy as np
import numpy as onp
import pandas as pd
from jax import grad, jit, ops, random
from jax.experimental.optimizers import Optimizer, adam
from pzflow import distributions
from pzflow.bijectors import Bijector_Info, InitFunction, Pytree
from pzflow.utils import build_bijector_from_info, gaussian_error_model
class Flow:
    """A normalizing flow that models tabular data.
    Attributes
    ----------
    data_columns : tuple
        List of DataFrame columns that the flow expects/produces.
    conditional_columns : tuple
        List of DataFrame columns on which the flow is conditioned.
    info : Any
        Object containing any kind of info included with the flow.
        Often describes the data the flow is trained on.
    latent
        The latent distribution of the normalizing flow.
        Has its own sample and log_prob methods.
    """
    def __init__(
        self,
        data_columns: Sequence[str] = None,
        bijector: Tuple[InitFunction, Bijector_Info] = None,
        conditional_columns: Sequence[str] = None,
        latent=None,
        data_error_model: Callable = None,
        condition_error_model: Callable = None,
        autoscale_conditions: bool = True,
        seed: int = 0,
        info: Any = None,
        file: str = None,
        _dictionary: dict = None,
    ):
        """Instantiate a normalizing flow.
        Note that while all of the init parameters are technically optional,
        you must provide either data_columns and bijector OR file.
        In addition, if a file is provided, all other parameters must be None.
        Parameters
        ----------
        data_columns : Sequence[str], optional
            Tuple, list, or other container of column names.
            These are the columns the flow expects/produces in DataFrames.
        bijector : Bijector Call, optional
            A Bijector call that consists of the bijector InitFunction that
            initializes the bijector and the tuple of Bijector Info.
            Can be the output of any Bijector, e.g. Reverse(), Chain(...), etc.
        conditional_columns : Sequence[str], optional
            Names of columns on which to condition the normalizing flow.
        latent : distribution, optional
            The latent distribution for the normalizing flow. Can be any of
            the distributions from pzflow.distributions. If not provided,
            a normal distribution is used with the number of dimensions
            inferred.
        data_error_model : Callable, optional
            A callable that defines the error model for data variables.
            data_error_model must take key, X, Xerr, nsamples as arguments where:
                key is a jax rng key, e.g. jax.random.PRNGKey(0)
                X is a 2 dimensional array of data variables, where the order
                of variables matches the order of the columns in data_columns
                Xerr is the corresponding 2 dimensional array of errors
                nsamples is the number of samples to draw from the error distribution
            data_error_model must return an array of samples with the shape
            (X.shape[0], nsamples, X.shape[1]).
            If data_error_model is not provided, a Gaussian error model is assumed.
        condition_error_model : Callable, optional
            A callable that defines the error model for conditional variables.
            condition_error_model must take key, X, Xerr, nsamples as arguments where:
                key is a jax rng key, e.g. jax.random.PRNGKey(0)
                X is a 2 dimensional array of conditional variables, where the order
                of variables matches the order of the columns in conditional_columns
                Xerr is the corresponding 2 dimensional array of errors
                nsamples is the number of samples to draw from the error distribution
            condition_error_model must return an array of samples with the shape
            (X.shape[0], nsamples, X.shape[1]).
            If condition_error_model is not provided, a Gaussian error model is assumed.
        autoscale_conditions : bool, default=True
            Sets whether or not conditions are automatically standard scaled when
            passed to a conditional flow. I recommend you leave this as True.
        seed : int, default=0
            The random seed for initial parameters
        info : Any, optional
            An object to attach to the info attribute.
        file : str, optional
            Path to file from which to load a pretrained flow.
            If a file is provided, all other parameters must be None.
        """
        # validate parameters
        if (
            data_columns is None
            and bijector is None
            and file is None
            and _dictionary is None
        ):
            raise ValueError("You must provide data_columns and bijector OR file.")
        if data_columns is not None and bijector is None:
            raise ValueError("Please also provide a bijector.")
        if data_columns is None and bijector is not None:
            raise ValueError("Please also provide data_columns.")
        if any(
            (
                data_columns is not None,
                bijector is not None,
                conditional_columns is not None,
                latent is not None,
                data_error_model is not None,
                condition_error_model is not None,
                info is not None,
            )
        ):
            if file is not None:
                raise ValueError(
                    "If providing a file, please do not provide any other parameters."
                )
            if _dictionary is not None:
                raise ValueError(
                    "If providing a dictionary, please do not provide any other parameters."
                )
        if file is not None and _dictionary is not None:
            raise ValueError("Only provide file or _dictionary, not both.")
        # if file or dictionary is provided, load everything from it
        if file is not None or _dictionary is not None:
            save_dict = self._save_dict()
            if file is not None:
                with open(file, "rb") as handle:
                    save_dict.update(pickle.load(handle))
            else:
                save_dict.update(_dictionary)
            if save_dict["class"] != self.__class__.__name__:
                raise TypeError(
                    f"This save file isn't a {self.__class__.__name__}."
                    + f"It is a {save_dict['class']}"
                )
            # load columns and dimensions
            self.data_columns = save_dict["data_columns"]
            self.conditional_columns = save_dict["conditional_columns"]
            self._input_dim = len(self.data_columns)
            self.info = save_dict["info"]
            # load the latent distribution
            self._latent_info = save_dict["latent_info"]
            self.latent = getattr(distributions, self._latent_info[0])(
                *self._latent_info[1]
            )
            # load the error models
            self.data_error_model = save_dict["data_error_model"]
            self.condition_error_model = save_dict["condition_error_model"]
            # load the bijector
            self._bijector_info = save_dict["bijector_info"]
            init_fun, _ = build_bijector_from_info(self._bijector_info)
            _, self._forward, self._inverse = init_fun(
                random.PRNGKey(0), self._input_dim
            )
            self._params = save_dict["params"]
            # load the conditional means and stds
            self._condition_means = save_dict["condition_means"]
            self._condition_stds = save_dict["condition_stds"]
            # set whether or not to automatically standard scale any
            # conditions passed to the normalizing flow
            self._autoscale_conditions = save_dict["autoscale_conditions"]
        # if no file is provided, use provided parameters
        else:
            self.data_columns = tuple(data_columns)
            self._input_dim = len(self.data_columns)
            self.info = info
            if conditional_columns is None:
                self.conditional_columns = None
                self._condition_means = None
                self._condition_stds = None
            else:
                self.conditional_columns = tuple(conditional_columns)
                self._condition_means = np.zeros(len(self.conditional_columns))
                self._condition_stds = np.ones(len(self.conditional_columns))
            # set whether or not to automatically standard scale any
            # conditions passed to the normalizing flow
            self._autoscale_conditions = autoscale_conditions
            # set up the latent distribution
            if latent is None:
                self.latent = distributions.Normal(self._input_dim)
            else:
                self.latent = latent
            self._latent_info = self.latent.info
            # set up the error models
            if data_error_model is None:
                self.data_error_model = gaussian_error_model
            else:
                self.data_error_model = data_error_model
            if condition_error_model is None:
                self.condition_error_model = gaussian_error_model
            else:
                self.condition_error_model = condition_error_model
            # set up the bijector with random params
            init_fun, self._bijector_info = bijector
            bijector_params, self._forward, self._inverse = init_fun(
                random.PRNGKey(seed), self._input_dim
            )
            self._params = (self.latent._params, bijector_params)
    def _get_conditions(self, inputs: pd.DataFrame) -> np.ndarray:
        """Return an array of the (standard scaled) bijector conditions."""
        # if this isn't a conditional flow, just return empty conditions
        if self.conditional_columns is None:
            conditions = np.zeros((inputs.shape[0], 1))
        # if this a conditional flow, return an array of the conditions
        else:
            columns = list(self.conditional_columns)
            conditions = np.array(inputs[columns].values)
            # standard scale using the means/stds saved during training
            conditions = (conditions - self._condition_means) / self._condition_stds
        return conditions
    def _get_err_samples(
        self,
        key,
        inputs: pd.DataFrame,
        err_samples: int,
        type: str = "data",
        skip: str = None,
    ) -> np.ndarray:
        """Draw `err_samples` error samples for each row of `inputs`.

        Returns an array of shape (nrows * err_samples, ndim), with the
        samples for each row stacked contiguously. If `skip` is given,
        that column is dropped from the returned samples.
        """
        X = inputs.copy()
        # get list of columns
        if type == "data":
            columns = list(self.data_columns)
            error_model = self.data_error_model
        elif type == "conditions":
            if self.conditional_columns is None:
                return np.zeros((err_samples * X.shape[0], 1))
            else:
                columns = list(self.conditional_columns)
                error_model = self.condition_error_model
        else:
            raise ValueError("type must be `data` or `conditions`.")
        # make sure all relevant variables have error columns
        for col in columns:
            # if errors not provided for the column, fill in zeros
            if f"{col}_err" not in inputs.columns and col != skip:
                X[f"{col}_err"] = np.zeros(X.shape[0])
            # if we are skipping this column, fill in nan's
            elif col == skip:
                X[col] = np.nan * np.zeros(X.shape[0])
                X[f"{col}_err"] = np.nan * np.zeros(X.shape[0])
        # pull out relevant columns
        err_columns = [col + "_err" for col in columns]
        X, Xerr = np.array(X[columns].values), np.array(X[err_columns].values)
        # generate samples
        Xsamples = error_model(key, X, Xerr, err_samples)
        Xsamples = Xsamples.reshape(X.shape[0] * err_samples, X.shape[1])
        # delete the column corresponding to skip
        if skip is not None:
            idx = columns.index(skip)
            Xsamples = np.delete(Xsamples, idx, axis=1)
        # if these are samples of conditions, standard scale them!
        if type == "conditions":
            Xsamples = (Xsamples - self._condition_means) / self._condition_stds
        return Xsamples
    def _log_prob(
        self, params: Pytree, inputs: np.ndarray, conditions: np.ndarray
    ) -> np.ndarray:
        """Log prob for arrays: latent log_prob plus the bijector log det."""
        # calculate log_prob
        u, log_det = self._forward(params[1], inputs, conditions=conditions)
        log_prob = self.latent.log_prob(params[0], u) + log_det
        # set NaN's to negative infinity (i.e. zero probability)
        log_prob = np.nan_to_num(log_prob, nan=np.NINF)
        return log_prob
    def log_prob(
        self, inputs: pd.DataFrame, err_samples: int = None, seed: int = None
    ) -> np.ndarray:
        """Calculates log probability density of inputs.
        Parameters
        ----------
        inputs : pd.DataFrame
            Input data for which log probability density is calculated.
            Every column in self.data_columns must be present.
            If self.conditional_columns is not None, those must be present
            as well. If other columns are present, they are ignored.
        err_samples : int, default=None
            Number of samples from the error distribution to average over for
            the log_prob calculation. If provided, Gaussian errors are assumed,
            and method will look for error columns in `inputs`. Error columns
            must end in `_err`. E.g. the error column for the variable `u` must
            be `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=None
            Random seed for drawing the samples with Gaussian errors.
        Returns
        -------
        np.ndarray
            Device array of shape (inputs.shape[0],).
        """
        if err_samples is None:
            # convert data to an array with columns ordered
            columns = list(self.data_columns)
            X = np.array(inputs[columns].values)
            # get conditions
            conditions = self._get_conditions(inputs)
            # calculate log_prob
            return self._log_prob(self._params, X, conditions)
        else:
            # validate nsamples
            assert isinstance(
                err_samples, int
            ), "err_samples must be a positive integer."
            assert err_samples > 0, "err_samples must be a positive integer."
            # get Gaussian samples
            seed = onp.random.randint(1e18) if seed is None else seed
            key = random.PRNGKey(seed)
            X = self._get_err_samples(key, inputs, err_samples, type="data")
            C = self._get_err_samples(key, inputs, err_samples, type="conditions")
            # calculate log_probs
            log_probs = self._log_prob(self._params, X, C)
            # average the probabilities (not the log probs) over the samples
            probs = np.exp(log_probs.reshape(-1, err_samples))
            return np.log(probs.mean(axis=1))
    def posterior(
        self,
        inputs: pd.DataFrame,
        column: str,
        grid: np.ndarray,
        marg_rules: dict = None,
        normalize: bool = True,
        err_samples: int = None,
        seed: int = None,
        batch_size: int = None,
        nan_to_zero: bool = True,
    ) -> np.ndarray:
        """Calculates posterior distributions for the provided column.
        Calculates the conditional posterior distribution, assuming the
        data values in the other columns of the DataFrame.
        Parameters
        ----------
        inputs : pd.DataFrame
            Data on which the posterior distributions are conditioned.
            Must have columns matching self.data_columns, *except*
            for the column specified for the posterior (see below).
        column : str
            Name of the column for which the posterior distribution
            is calculated. Must be one of the columns in self.data_columns.
            However, whether or not this column is one of the columns in
            `inputs` is irrelevant.
        grid : np.ndarray
            Grid on which to calculate the posterior.
        marg_rules : dict, optional
            Dictionary with rules for marginalizing over missing variables.
            The dictionary must contain the key "flag", which gives the flag
            that indicates a missing value. E.g. if missing values are given
            the value 99, the dictionary should contain {"flag": 99}.
            The dictionary must also contain {"name": callable} for any
            variables that will need to be marginalized over, where name is
            the name of the variable, and callable is a callable that takes
            the row of variables and returns a grid over which to marginalize
            the variable. E.g. {"y": lambda row: np.linspace(0, row["x"], 10)}.
            Note: the callable for a given name must *always* return an array
            of the same length, regardless of the input row.
        err_samples : int, default=None
            Number of samples from the error distribution to average over for
            the posterior calculation. If provided, Gaussian errors are assumed,
            and method will look for error columns in `inputs`. Error columns
            must end in `_err`. E.g. the error column for the variable `u` must
            be `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=None
            Random seed for drawing the samples with Gaussian errors.
        batch_size : int, default=None
            Size of batches in which to calculate posteriors. If None, all
            posteriors are calculated simultaneously. Simultaneous calculation
            is faster, but memory intensive for large data sets.
        normalize : boolean, default=True
            Whether to normalize the posterior so that it integrates to 1.
        nan_to_zero : bool, default=True
            Whether to convert NaN's to zero probability in the final pdfs.
        Returns
        -------
        np.ndarray
            Device array of shape (inputs.shape[0], grid.size).
        """
        # get the index of the provided column, and remove it from the list
        columns = list(self.data_columns)
        idx = columns.index(column)
        columns.remove(column)
        nrows = inputs.shape[0]
        batch_size = nrows if batch_size is None else batch_size
        # make sure indices run 0 -> nrows
        inputs = inputs.reset_index(drop=True)
        if err_samples is not None:
            # validate nsamples
            assert isinstance(
                err_samples, int
            ), "err_samples must be a positive integer."
            assert err_samples > 0, "err_samples must be a positive integer."
            # set the seed
            seed = onp.random.randint(1e18) if seed is None else seed
            key = random.PRNGKey(seed)
        # empty array to hold pdfs
        pdfs = np.zeros((nrows, len(grid)))
        # if marginalization rules were passed, we will loop over the rules
        # and repeatedly call this method
        if marg_rules is not None:
            # if the flag is NaN, we must use np.isnan to check for flags
            if onp.isnan(marg_rules["flag"]):
                def check_flags(data):
                    return onp.isnan(data)
            # else we use np.isclose to check for flags
            else:
                def check_flags(data):
                    return onp.isclose(data, marg_rules["flag"])
            # first calculate pdfs for unflagged rows
            unflagged_idx = inputs[
                ~check_flags(inputs[columns]).any(axis=1)
            ].index.tolist()
            unflagged_pdfs = self.posterior(
                inputs=inputs.iloc[unflagged_idx],
                column=column,
                grid=grid,
                err_samples=err_samples,
                seed=seed,
                batch_size=batch_size,
                normalize=False,
                nan_to_zero=nan_to_zero,
            )
            # save these pdfs in the big array
            pdfs = ops.index_update(
                pdfs,
                ops.index[unflagged_idx, :],
                unflagged_pdfs,
                indices_are_sorted=True,
                unique_indices=True,
            )
            # we will keep track of all the rows we've already calculated
            # posteriors for
            already_done = unflagged_idx
            # now we will loop over the rules in marg_rules
            for name, rule in marg_rules.items():
                # ignore the flag, because that's not a column in the data
                if name == "flag":
                    continue
                # get the list of new rows for which we need to calculate posteriors
                flagged_idx = inputs[check_flags(inputs[name])].index.tolist()
                flagged_idx = list(set(flagged_idx).difference(already_done))
                # if flagged_idx is empty, move on!
                if len(flagged_idx) == 0:
                    continue
                # get the marginalization grid for each row
                marg_grids = (
                    inputs.iloc[flagged_idx]
                    .apply(rule, axis=1, result_type="expand")
                    .values
                )
                # make a new data frame with the marginalization grids replacing
                # the values of the flag in the column
                marg_inputs = pd.DataFrame(
                    np.repeat(
                        inputs.iloc[flagged_idx].values, marg_grids.shape[1], axis=0
                    ),
                    columns=inputs.columns,
                )
                marg_inputs[name] = marg_grids.reshape(marg_inputs.shape[0], 1)
                # remove the error column if it's present
                marg_inputs.drop(f"{name}_err", axis=1, inplace=True, errors="ignore")
                # calculate posteriors for these
                # (recursive call: handles rows flagged in several columns)
                marg_pdfs = self.posterior(
                    inputs=marg_inputs,
                    column=column,
                    grid=grid,
                    marg_rules=marg_rules,
                    err_samples=err_samples,
                    seed=seed,
                    batch_size=batch_size,
                    normalize=False,
                    nan_to_zero=nan_to_zero,
                )
                # sum over the marginalized dimension
                marg_pdfs = marg_pdfs.reshape(
                    len(flagged_idx), marg_grids.shape[1], grid.size
                )
                marg_pdfs = marg_pdfs.sum(axis=1)
                # save the new pdfs in the big array
                pdfs = ops.index_update(
                    pdfs,
                    ops.index[flagged_idx, :],
                    marg_pdfs,
                    indices_are_sorted=True,
                    unique_indices=True,
                )
                # add these flagged indices to the list of rows already done
                already_done += flagged_idx
        # now for the main posterior calculation loop
        else:
            # loop through batches
            for batch_idx in range(0, nrows, batch_size):
                # get the data batch
                # and, if this is a conditional flow, the corresponding conditions
                batch = inputs.iloc[batch_idx : batch_idx + batch_size]
                # if not drawing samples, just grab batch and conditions
                if err_samples is None:
                    conditions = self._get_conditions(batch)
                    batch = np.array(batch[columns].values)
                # if only drawing condition samples...
                elif len(self.data_columns) == 1:
                    conditions = self._get_err_samples(
                        key, batch, err_samples, type="conditions"
                    )
                    batch = np.repeat(batch[columns].values, err_samples, axis=0)
                # if drawing data and condition samples...
                else:
                    conditions = self._get_err_samples(
                        key, batch, err_samples, type="conditions"
                    )
                    batch = self._get_err_samples(
                        key, batch, err_samples, skip=column, type="data"
                    )
                # make a new copy of each row for each value of the column
                # for which we are calculating the posterior
                batch = np.hstack(
                    (
                        np.repeat(batch[:, :idx], len(grid), axis=0,),
                        np.tile(grid, len(batch))[:, None],
                        np.repeat(batch[:, idx:], len(grid), axis=0,),
                    )
                )
                # make similar copies of the conditions
                conditions = np.repeat(conditions, len(grid), axis=0)
                # calculate probability densities
                log_prob = self._log_prob(self._params, batch, conditions).reshape(
                    (-1, len(grid))
                )
                prob = np.exp(log_prob)
                # if we were Gaussian sampling, average over the samples
                if err_samples is not None:
                    prob = prob.reshape(-1, err_samples, len(grid))
                    prob = prob.mean(axis=1)
                # add the pdfs to the bigger list
                pdfs = ops.index_update(
                    pdfs,
                    ops.index[batch_idx : batch_idx + batch_size, :],
                    prob,
                    indices_are_sorted=True,
                    unique_indices=True,
                )
        if normalize:
            # normalize so they integrate to one
            pdfs = pdfs / np.trapz(y=pdfs, x=grid).reshape(-1, 1)
        if nan_to_zero:
            # set NaN's equal to zero probability
            pdfs = np.nan_to_num(pdfs, nan=0.0)
        return pdfs
    def sample(
        self,
        nsamples: int = 1,
        conditions: pd.DataFrame = None,
        save_conditions: bool = True,
        seed: int = None,
    ) -> pd.DataFrame:
        """Returns samples from the normalizing flow.
        Parameters
        ----------
        nsamples : int, default=1
            The number of samples to be returned.
        conditions : pd.DataFrame, optional
            If this is a conditional flow, you must pass conditions for
            each sample. nsamples will be drawn for each row in conditions.
        save_conditions : bool, default=True
            If true, conditions will be saved in the DataFrame of samples
            that is returned.
        seed : int, optional
            Sets the random seed for the samples.
        Returns
        -------
        pd.DataFrame
            Pandas DataFrame of samples.
        """
        # validate nsamples
        assert isinstance(nsamples, int), "nsamples must be a positive integer."
        assert nsamples > 0, "nsamples must be a positive integer."
        if self.conditional_columns is not None and conditions is None:
            raise ValueError(
                f"Must provide the following conditions\n{self.conditional_columns}"
            )
        # if this isn't a conditional flow, get empty conditions
        if self.conditional_columns is None:
            conditions = np.zeros((nsamples, 1))
        # otherwise get conditions and make `nsamples` copies of each
        else:
            conditions = self._get_conditions(conditions)
            conditions = np.repeat(conditions, nsamples, axis=0)
        # draw from latent distribution
        u = self.latent.sample(self._params[0], conditions.shape[0], seed)
        # take the inverse back to the data distribution
        x = self._inverse(self._params[1], u, conditions=conditions)[0]
        # if not conditional, or save_conditions is False, this is all we need
        if self.conditional_columns is None or save_conditions is False:
            x = pd.DataFrame(x, columns=self.data_columns)
        # but if conditional and save_conditions is True,
        # save conditions with samples
        else:
            # un-scale the conditions
            conditions = conditions * self._condition_stds + self._condition_means
            x = pd.DataFrame(
                np.hstack((x, conditions)),
                columns=self.data_columns + self.conditional_columns,
            )
        # return the samples!
        return x
    def _save_dict(self) -> dict:
        """Returns the dictionary of all flow params to be saved."""
        save_dict = {"class": self.__class__.__name__}
        keys = [
            "data_columns",
            "conditional_columns",
            "condition_means",
            "condition_stds",
            "data_error_model",
            "condition_error_model",
            "autoscale_conditions",
            "info",
            "latent_info",
            "bijector_info",
            "params",
        ]
        for key in keys:
            # attributes may be stored publicly or with a leading underscore;
            # fall back to None if neither exists
            try:
                save_dict[key] = getattr(self, key)
            except AttributeError:
                try:
                    save_dict[key] = getattr(self, "_" + key)
                except AttributeError:
                    save_dict[key] = None
        return save_dict
    def save(self, file: str):
        """Saves the flow to a file.
        Pickles the flow and saves it to a file that can be passed as
        the `file` argument during flow instantiation.
        WARNING: Currently, this method only works for bijectors that are
        implemented in the `bijectors` module. If you want to save a flow
        with a custom bijector, you either need to add the bijector to that
        module, or handle the saving and loading on your end.
        Parameters
        ----------
        file : str
            Path to where the flow will be saved.
            Extension `.pkl` will be appended if not already present.
        """
        save_dict = self._save_dict()
        with open(file, "wb") as handle:
            pickle.dump(save_dict, handle, recurse=True)
    def train(
        self,
        inputs: pd.DataFrame,
        epochs: int = 50,
        batch_size: int = 1024,
        optimizer: Optimizer = None,
        loss_fn: Callable = None,
        convolve_errs: bool = False,
        seed: int = 0,
        verbose: bool = False,
    ) -> list:
        """Trains the normalizing flow on the provided inputs.
        Parameters
        ----------
        inputs : pd.DataFrame
            Data on which to train the normalizing flow.
            Must have columns matching self.data_columns.
        epochs : int, default=50
            Number of epochs to train.
        batch_size : int, default=1024
            Batch size for training.
        optimizer : jax Optimizer, default=adam(step_size=1e-3)
            An optimizer from jax.experimental.optimizers.
        loss_fn : Callable, optional
            A function to calculate the loss: loss = loss_fn(params, x).
            If not provided, will be -mean(log_prob).
        convolve_errs : bool, default=False
            Whether to draw new data from the error distributions during
            each epoch of training. Assumes errors are Gaussian, and method
            will look for error columns in `inputs`. Error columns must end
            in `_err`. E.g. the error column for the variable `u` must be
            `u_err`. Zero error assumed for any missing error columns.
        seed : int, default=0
            A random seed to control the batching and the (optional)
            error sampling.
        verbose : bool, default=False
            If true, print the training loss every 5% of epochs.
        Returns
        -------
        list
            List of training losses: the initial loss followed by the loss
            after each epoch (length epochs + 1).
        """
        # validate epochs
        if not isinstance(epochs, int) or epochs <= 0:
            raise ValueError("epochs must be a positive integer.")
        # if no loss_fn is provided, use the default loss function
        if loss_fn is None:
            @jit
            def loss_fn(params, x, c):
                return -np.mean(self._log_prob(params, x, c))
        # initialize the optimizer
        optimizer = adam(step_size=1e-3) if optimizer is None else optimizer
        opt_init, opt_update, get_params = optimizer
        opt_state = opt_init(self._params)
        # define the training step function
        @jit
        def step(i, opt_state, x, c):
            params = get_params(opt_state)
            gradients = grad(loss_fn)(params, x, c)
            return opt_update(i, gradients, opt_state)
        # get list of data columns
        columns = list(self.data_columns)
        # if this is a conditional flow, and autoscale_conditions == True
        # save the means and stds of the conditional columns
        if self.conditional_columns is not None and self._autoscale_conditions:
            self._condition_means = np.array(
                inputs[list(self.conditional_columns)].values.mean(axis=0)
            )
            condition_stds = np.array(
                inputs[list(self.conditional_columns)].values.std(axis=0)
            )
            # guard against zero stds (constant columns) to avoid division by zero
            self._condition_stds = np.where(condition_stds != 0, condition_stds, 1)
        # define a function to return batches
        if convolve_errs:
            def get_batch(sample_key, x, type):
                return self._get_err_samples(sample_key, x, 1, type=type)
        else:
            def get_batch(sample_key, x, type):
                if type == "conditions":
                    return self._get_conditions(x)
                else:
                    return np.array(x[columns].values)
        # get random seed for training loop
        key = random.PRNGKey(seed)
        if verbose:
            print(f"Training {epochs} epochs \nLoss:")
        # save the initial loss
        X = np.array(inputs[columns].values)
        C = self._get_conditions(inputs)
        losses = [loss_fn(self._params, X, C)]
        if verbose:
            print(f"(0) {losses[-1]:.4f}")
        # loop through training
        itercount = itertools.count()
        for epoch in range(epochs):
            # new permutation of batches
            permute_key, sample_key, key = random.split(key, num=3)
            idx = random.permutation(permute_key, inputs.shape[0])
            X = inputs.iloc[idx]
            # loop through batches and step optimizer
            for batch_idx in range(0, len(X), batch_size):
                # if sampling from the error distribution, this returns a
                # Gaussian sample of the batch. Else just returns batch as a
                # jax array
                batch = get_batch(
                    sample_key, X.iloc[batch_idx : batch_idx + batch_size], type="data"
                )
                batch_conditions = get_batch(
                    sample_key,
                    X.iloc[batch_idx : batch_idx + batch_size],
                    type="conditions",
                )
                opt_state = step(next(itercount), opt_state, batch, batch_conditions,)
            # save end-of-epoch training loss
            params = get_params(opt_state)
            losses.append(
                loss_fn(params, np.array(X[columns].values), self._get_conditions(X),)
            )
            if verbose and (
                epoch % max(int(0.05 * epochs), 1) == 0 or (epoch + 1) == epochs
            ):
                print(f"({epoch+1}) {losses[-1]:.4f}")
        # update the flow parameters with the final training state
        self._params = get_params(opt_state)
        return losses
| 40.586712 | 92 | 0.577176 | 35,619 | 0.988291 | 0 | 0 | 297 | 0.008241 | 0 | 0 | 16,140 | 0.447823 |
9210dc5e1c681b47bc7424501d4cc31b8599ef0b | 4,763 | py | Python | tests/epyccel/test_epyccel_transpose.py | dina-fouad/pyccel | f4d919e673b400442b9c7b81212b6fbef749c7b7 | [
"MIT"
] | 206 | 2018-06-28T00:28:47.000Z | 2022-03-29T05:17:03.000Z | tests/epyccel/test_epyccel_transpose.py | dina-fouad/pyccel | f4d919e673b400442b9c7b81212b6fbef749c7b7 | [
"MIT"
] | 670 | 2018-07-23T11:02:24.000Z | 2022-03-30T07:28:05.000Z | tests/epyccel/test_epyccel_transpose.py | dina-fouad/pyccel | f4d919e673b400442b9c7b81212b6fbef749c7b7 | [
"MIT"
] | 19 | 2019-09-19T06:01:00.000Z | 2022-03-29T05:17:06.000Z | # pylint: disable=missing-function-docstring, missing-module-docstring/
from numpy.random import randint
from pyccel.epyccel import epyccel
def test_transpose_shape(language):
    """numpy.transpose of 2D/3D arrays: epyccel-compiled output matches Python."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose
        y = transpose(x)
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        from numpy import transpose
        y = transpose(x)
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2 )
def test_transpose_property(language):
    """Transpose via the `.T` property: epyccel-compiled output matches Python."""
    def f1(x : 'int[:,:]'):
        y = x.T
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        y = x.T
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2 )
def test_transpose_in_expression(language):
    """Transpose used inside arithmetic expressions (transpose(x)+3, x.T*3)."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose
        y = transpose(x)+3
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        y = x.T*3
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2 )
def test_mixed_order(language):
    """Mixing C-ordered arrays with Fortran-ordered ones (order='F')
    through a transpose must still give the same results as NumPy."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose, ones
        n, m = x.shape
        y = ones((m,n), order='F')
        z = x+transpose(y)
        n, m = z.shape
        return n, m, z[-1,0], z[0,-1]
    def f2(x : 'int[:,:]'):
        from numpy import transpose, ones
        n, m = x.shape
        y = ones((m,n), order='F')
        z = x.transpose()+y
        n, m = z.shape
        return n, m, z[-1,0], z[0,-1]
    def f3(x : 'int[:,:,:]'):
        from numpy import transpose, ones
        n, m, p = x.shape
        y = ones((p,m,n))
        z = transpose(x)+y
        n, m, p = z.shape
        return n, m, p, z[0,-1,0], z[0,0,-1], z[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x1 ) == f2_epyc( x1 )
    f3_epyc = epyccel(f3, language=language)
    assert f3( x2 ) == f3_epyc( x2 )
def test_transpose_pointer(language):
    """A transpose is a view, not a copy: mutating the source array after
    taking the transpose must be visible through the transposed result."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose
        y = transpose(x)
        x[0,-1] += 22
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        y = x.T
        x[0,-1,0] += 11
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    # Pass copies to the translated functions so each side mutates its
    # own array starting from identical data.
    x1 = randint(50, size=(2,5))
    x1_copy = x1.copy()
    x2 = randint(50, size=(2,3,7))
    x2_copy = x2.copy()
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1_copy )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2_copy )
def test_transpose_of_expression(language):
    """Transposing a temporary expression result (transpose(x*2),
    (x*2).T) must translate correctly."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose
        y = transpose(x*2)+3
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        y = (x*2).T*3
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2 )
def test_force_transpose(language):
    """Assigning a transpose into a pre-allocated array via slice
    assignment forces a materialized (copied) transpose."""
    def f1(x : 'int[:,:]'):
        from numpy import transpose, empty
        n,m = x.shape
        y = empty((m,n))
        y[:,:] = transpose(x)
        n, m = y.shape
        return n, m, y[-1,0], y[0,-1]
    def f2(x : 'int[:,:,:]'):
        from numpy import empty
        n,m,p = x.shape
        y = empty((p,m,n))
        y[:,:,:] = x.transpose()
        n, m, p = y.shape
        return n, m, p, y[0,-1,0], y[0,0,-1], y[-1,-1,0]
    x1 = randint(50, size=(2,5))
    x2 = randint(50, size=(2,3,7))
    f1_epyc = epyccel(f1, language=language)
    assert f1( x1 ) == f1_epyc( x1 )
    f2_epyc = epyccel(f2, language=language)
    assert f2( x2 ) == f2_epyc( x2 )
| 28.183432 | 71 | 0.505774 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.050598 |
921137b99d41d39db37724760282189158ca4b71 | 554 | py | Python | configs.py | h3xh4wk/yamlguide | 7a5b2ff88fabd2916e3f89383761f3ebbe75afc4 | [
"MIT"
] | null | null | null | configs.py | h3xh4wk/yamlguide | 7a5b2ff88fabd2916e3f89383761f3ebbe75afc4 | [
"MIT"
] | null | null | null | configs.py | h3xh4wk/yamlguide | 7a5b2ff88fabd2916e3f89383761f3ebbe75afc4 | [
"MIT"
] | null | null | null | import yaml
import pprint
def read_yaml():
    """Load every YAML document from configs.yml and return them as a list."""
    with open('configs.yml') as config_file:
        documents = list(yaml.safe_load_all(config_file))
    return documents
def write_yaml(data):
    """Dump all documents in *data* to toyaml.yml as YAML.

    The file is opened in append mode, so repeated runs accumulate documents.
    """
    with open('toyaml.yml', 'a') as out_file:
        yaml.dump_all(data, out_file, default_flow_style=False)
if __name__ == "__main__":
    # Load every YAML document from configs.yml.
    my_config = read_yaml()
    # Pretty-print the parsed documents for inspection.
    pprint.pprint(my_config)
    # Round-trip the parsed documents back out as YAML
    # (write_yaml appends to toyaml.yml).
    write_yaml(my_config)
| 19.103448 | 56 | 0.646209 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 188 | 0.33935 |
92153cba78723d1570ff0bdccee6e07fae76883e | 649 | py | Python | consts.py | honey96dev/python-coinbase-tradingbot | ec5fad4336a10ed28c44373dc9509581dd96264f | [
"MIT"
] | 1 | 2021-12-29T09:31:37.000Z | 2021-12-29T09:31:37.000Z | consts.py | honey96dev/python-coinbase-tradingbot | ec5fad4336a10ed28c44373dc9509581dd96264f | [
"MIT"
] | null | null | null | consts.py | honey96dev/python-coinbase-tradingbot | ec5fad4336a10ed28c44373dc9509581dd96264f | [
"MIT"
] | null | null | null | months_json = {
"1": "January",
"2": "February",
"3": "March",
"4": "April",
"5": "May",
"6": "June",
"7": "July",
"8": "August",
"9": "September",
"01": "January",
"02": "February",
"03": "March",
"04": "April",
"05": "May",
"06": "June",
"07": "July",
"08": "August",
"09": "September",
"10": "October",
"11": "November",
"12": "December",
}
# Month-number → day-count lookup, keyed like months_json.
# NOTE(review): February is fixed at 28, so leap years are not handled —
# confirm callers never need Feb 29.
month_days = {
    "1": 31,
    "2": 28,
    "3": 31,
    "4": 30,
    "5": 31,
    "6": 30,
    "7": 31,
    "8": 31,
    "9": 30,
    "01": 31,
    "02": 28,
    "03": 31,
    "04": 30,
    "05": 31,
    "06": 30,
    "07": 31,
    "08": 31,
    "09": 30,
    "10": 31,
    "11": 30,
    "12": 31,
    }
| 12.98 | 20 | 0.391371 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 317 | 0.488444 |
9216100e8c2e43d2e0d3473e279433eb80f0be8a | 66 | py | Python | splearn/utils/__init__.py | Treers/spark-learn | ba814b51041c7dbff33c687e7d601e5c51f31c3d | [
"MIT"
] | null | null | null | splearn/utils/__init__.py | Treers/spark-learn | ba814b51041c7dbff33c687e7d601e5c51f31c3d | [
"MIT"
] | null | null | null | splearn/utils/__init__.py | Treers/spark-learn | ba814b51041c7dbff33c687e7d601e5c51f31c3d | [
"MIT"
] | null | null | null | #coding: utf-8
'''
@Time: 2019/4/25 11:15
@Author: fangyoucai
'''
| 11 | 22 | 0.621212 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 64 | 0.969697 |
92169128033b3d7a3bf8aa8e0095c438a88138a8 | 297 | py | Python | salty/exceptions.py | Markcial/salty | c9e5ad3e71af94cb5260f175e4d3734885efda30 | [
"MIT"
] | null | null | null | salty/exceptions.py | Markcial/salty | c9e5ad3e71af94cb5260f175e4d3734885efda30 | [
"MIT"
] | null | null | null | salty/exceptions.py | Markcial/salty | c9e5ad3e71af94cb5260f175e4d3734885efda30 | [
"MIT"
] | null | null | null |
__all__ = ['EncryptException', 'DecryptException', 'DefaultKeyNotSet', 'NoValidKeyFound']


# Fixed: these previously derived from BaseException. Per PEP 352 and the
# Python tutorial, user-defined errors should derive from Exception so that
# generic `except Exception` handlers catch them; BaseException is reserved
# for exits like KeyboardInterrupt/SystemExit. Direct catchers of these
# classes are unaffected.
class EncryptException(Exception):
    """Base error raised when encryption fails."""


class DecryptException(Exception):
    """Base error raised when decryption fails."""


class DefaultKeyNotSet(EncryptException):
    """Encryption was attempted without a default key configured."""


class NoValidKeyFound(DecryptException):
    """None of the available keys could decrypt the payload."""
| 14.85 | 89 | 0.76431 | 193 | 0.649832 | 0 | 0 | 0 | 0 | 0 | 0 | 71 | 0.239057 |
9218ee2ecb9207604449852e8a31eab96059f1e2 | 106 | py | Python | build_gpcr/management/commands/build_text.py | pszgaspar/protwis | 4989a67175ef3c95047d795c843cf6b9cf4141fa | [
"Apache-2.0"
] | 21 | 2016-01-20T09:33:14.000Z | 2021-12-20T19:19:45.000Z | build_gpcr/management/commands/build_text.py | pszgaspar/protwis | 4989a67175ef3c95047d795c843cf6b9cf4141fa | [
"Apache-2.0"
] | 75 | 2016-02-26T16:29:58.000Z | 2022-03-21T12:35:13.000Z | build_gpcr/management/commands/build_text.py | pszgaspar/protwis | 4989a67175ef3c95047d795c843cf6b9cf4141fa | [
"Apache-2.0"
] | 77 | 2016-01-22T08:44:26.000Z | 2022-02-01T15:54:56.000Z | from build.management.commands.build_text import Command as BuildText
class Command(BuildText):
    """Thin alias exposing the shared BuildText management command under
    this app; all behavior is inherited unchanged."""
    pass
9219ccab82b93a1166169f02e863f1093a3ef85a | 8,822 | py | Python | test/unit/builtins/test_packages.py | jmgao/bfg9000 | 7ffb15aed8ea88ee454821e83aa8c42014ac8e34 | [
"BSD-3-Clause"
] | null | null | null | test/unit/builtins/test_packages.py | jmgao/bfg9000 | 7ffb15aed8ea88ee454821e83aa8c42014ac8e34 | [
"BSD-3-Clause"
] | null | null | null | test/unit/builtins/test_packages.py | jmgao/bfg9000 | 7ffb15aed8ea88ee454821e83aa8c42014ac8e34 | [
"BSD-3-Clause"
] | null | null | null | import mock
import ntpath
import os
import re
import sys
import unittest
from collections import namedtuple
from .common import BuiltinTest
from ... import make_env
from bfg9000 import file_types, options as opts
from bfg9000.builtins import packages
from bfg9000.exceptions import PackageResolutionError, PackageVersionError
from bfg9000.file_types import Directory, HeaderDirectory
from bfg9000.packages import CommonPackage, Framework
from bfg9000.path import abspath
from bfg9000.platforms import platform_name
from bfg9000.versioning import SpecifierSet, Version
if sys.version_info >= (3,):
open_name = 'builtins.open'
else:
open_name = '__builtin__.open'
# Fix the mock package's mock_open function to work with iter(); note: this is
# already fixed in Python 3.7.1's unittest.mock.
def mock_open(*args, **kwargs):
    """Wrap mock.mock_open so the returned handle supports iter().

    Older mock versions don't wire __iter__ to the read_data; this is
    already fixed in Python 3.7.1's unittest.mock.
    """
    opener = mock.mock_open(*args, **kwargs)
    file_handle = opener.return_value

    def _iterate():
        return iter(file_handle.readlines.side_effect())

    file_handle.__iter__.side_effect = _iterate
    return opener
def mock_which(*args, **kwargs):
    """Stand-in for shell.which: always resolves to one fixed absolute path."""
    command_path = os.path.abspath('/command')
    return [command_path]
def mock_execute(args, **kwargs):
    """Canned replacement for shell.execute, keyed on the final argument.

    Returns the tool output string (or an (stdout, stderr) tuple for the
    linker-version probe); returns None for any unrecognized argument,
    matching the original fall-through behavior.
    """
    canned_responses = {
        '--version': ('gcc (Ubuntu 5.4.0-6ubuntu1~16.04.9) 5.4.0 20160609\n' +
                      'Copyright (C) 2015 Free Software Foundation, Inc.\n'),
        '-Wl,--version': ('', '/usr/bin/ld --version\n'),
        '-print-search-dirs': 'libraries: =/usr/lib\n',
        '-print-sysroot': '/\n',
        '--verbose': 'SEARCH_DIR("/usr")\n',
        '/?': ('Microsoft (R) C/C++ Optimizing Compiler Version ' +
               '19.12.25831 for x86'),
        '--modversion': '1.2.3\n',
    }
    return canned_responses.get(args[-1])
class TestFramework(unittest.TestCase):
    """packages.framework() should produce a CommonPackage carrying an
    opts.lib(Framework(...)) link option, and only on Darwin targets."""
    def test_framework(self):
        # Bare framework: the package name is just the framework name.
        env = make_env('darwin')
        self.assertEqual(
            packages.framework(env, 'name'),
            CommonPackage('name', env.target_platform.object_format,
                          link_options=opts.option_list(opts.lib(
                              Framework('name')
                          )))
        )
    def test_framework_suffix(self):
        # With a suffix, the package name becomes 'name,suffix'.
        env = make_env('darwin')
        self.assertEqual(
            packages.framework(env, 'name', 'suffix'),
            CommonPackage('name,suffix',
                          env.target_platform.object_format,
                          link_options=opts.option_list(opts.lib(
                              Framework('name', 'suffix')
                          )))
        )
    def test_frameworks_unsupported(self):
        # Non-Darwin platforms have no frameworks: resolution must fail.
        env = make_env('linux')
        with self.assertRaises(PackageResolutionError):
            packages.framework(env, 'name')
        with self.assertRaises(PackageResolutionError):
            packages.framework(env, 'name', 'suffix')
class TestPackage(BuiltinTest):
    """packages.package() resolved via mocked pkg-config-style shell calls
    (mock_execute answers '--modversion' with 1.2.3)."""
    def test_name(self):
        with mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('bfg9000.shell.which', mock_which):  # noqa
            pkg = packages.package(self.env, 'name')
            self.assertEqual(pkg.name, 'name')
            self.assertEqual(pkg.version, Version('1.2.3'))
            self.assertEqual(pkg.specifier, SpecifierSet())
            self.assertEqual(pkg.static, False)
    def test_version(self):
        # A version requirement is recorded as the package's specifier.
        with mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('bfg9000.shell.which', mock_which):  # noqa
            pkg = packages.package(self.env, 'name', version='>1.0')
            self.assertEqual(pkg.name, 'name')
            self.assertEqual(pkg.version, Version('1.2.3'))
            self.assertEqual(pkg.specifier, SpecifierSet('>1.0'))
            self.assertEqual(pkg.static, False)
    def test_lang(self):
        # Explicit language selection should not change the resolved data.
        with mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('bfg9000.shell.which', mock_which):  # noqa
            pkg = packages.package(self.env, 'name', lang='c++')
            self.assertEqual(pkg.name, 'name')
            self.assertEqual(pkg.version, Version('1.2.3'))
            self.assertEqual(pkg.specifier, SpecifierSet())
            self.assertEqual(pkg.static, False)
    def test_kind(self):
        # kind='static' flips the static flag.
        with mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('bfg9000.shell.which', mock_which):  # noqa
            pkg = packages.package(self.env, 'name', kind='static')
            self.assertEqual(pkg.name, 'name')
            self.assertEqual(pkg.version, Version('1.2.3'))
            self.assertEqual(pkg.specifier, SpecifierSet())
            self.assertEqual(pkg.static, True)
    def test_invalid_kind(self):
        # Anything other than the known kinds is rejected up front.
        with self.assertRaises(ValueError):
            packages.package(self.env, 'name', kind='bad')
class TestBoostPackage(unittest.TestCase):
    """Boost resolution: version parsing from boost/version.hpp, plus
    platform-specific header/library discovery (POSIX and Windows)."""
    def test_boost_version(self):
        # _boost_version parses BOOST_LIB_VERSION ("1_23_4" -> 1.23.4).
        data = '#define BOOST_LIB_VERSION "1_23_4"\n'
        with mock.patch(open_name, mock_open(read_data=data)):
            hdr = HeaderDirectory(abspath('path'))
            self.assertEqual(packages._boost_version(hdr, SpecifierSet('')),
                             Version('1.23.4'))
    def test_boost_version_too_old(self):
        # A parsed version outside the specifier raises PackageVersionError.
        data = '#define BOOST_LIB_VERSION "1_23_4"\n'
        with mock.patch(open_name, mock_open(read_data=data)):
            hdr = HeaderDirectory(abspath('path'))
            with self.assertRaises(PackageVersionError):
                packages._boost_version(hdr, SpecifierSet('>=1.30'))
    def test_boost_version_cant_parse(self):
        # A header with no BOOST_LIB_VERSION define also raises.
        data = 'foobar\n'
        with mock.patch(open_name, mock_open(read_data=data)):
            hdr = HeaderDirectory(abspath('path'))
            with self.assertRaises(PackageVersionError):
                packages._boost_version(hdr, SpecifierSet(''))
    def test_posix(self):
        env = make_env('linux', clear_variables=True)
        # Pretend the header, the thread library, and the standard
        # include/lib directories all exist on disk.
        def mock_exists(x):
            if ( re.search(r'[/\\]boost[/\\]version.hpp$', x) or
                 re.search(r'[/\\]libboost_thread', x) or
                 x in ['/usr/include', '/usr/lib']):
                return True
            return False
        with mock.patch('bfg9000.builtins.packages._boost_version',
                        return_value=Version('1.23')), \
             mock.patch('bfg9000.shell.which', return_value=['command']), \
             mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('os.path.exists', mock_exists):  # noqa
            pkg = packages.boost_package(env, 'thread')
            self.assertEqual(pkg.name, 'boost(thread)')
            self.assertEqual(pkg.version, Version('1.23'))
    def test_windows_default_location(self):
        env = make_env('windows', clear_variables=True)
        boost_incdir = r'C:\Boost\include\boost-1.23'
        # Directory walk yields a single boost-1.23 include directory.
        def mock_walk(top):
            yield (top,) + (
                [('boost-1.23', ntpath.join(top, 'boost-1.23'))],
                []
            )
        # Local mock: only the MSVC '/?' probe succeeds here.
        def mock_execute(*args, **kwargs):
            if args[0][1] == '/?':
                return 'cl.exe'
            raise ValueError()
        def mock_exists(x):
            if re.search(r'[/\\]boost[/\\]version.hpp$', x):
                return True
            return False
        with mock.patch('bfg9000.builtins.find._walk_flat', mock_walk), \
             mock.patch('bfg9000.builtins.packages._boost_version',
                        return_value=Version('1.23')), \
             mock.patch('bfg9000.shell.which', return_value=['command']), \
             mock.patch('bfg9000.shell.execute', mock_execute), \
             mock.patch('os.path.exists', mock_exists):  # noqa
            pkg = packages.boost_package(env, 'thread')
            self.assertEqual(pkg.name, 'boost(thread)')
            self.assertEqual(pkg.version, Version('1.23'))
            self.assertEqual(pkg._compile_options, opts.option_list(
                opts.include_dir(HeaderDirectory(abspath(boost_incdir)))
            ))
            self.assertEqual(pkg._link_options, opts.option_list(
                opts.lib_dir(Directory(abspath(r'C:\Boost\lib')))
            ))
class TestSystemExecutable(BuiltinTest):
    """system_executable() wraps the which() result in an Executable with
    the requested (or platform-default) object format."""
    def test_name(self):
        with mock.patch('bfg9000.builtins.packages.which', mock_which):
            self.assertEqual(
                packages.system_executable(self.env, 'name'),
                file_types.Executable(abspath('/command'),
                                      self.env.target_platform.object_format)
            )
    def test_format(self):
        # An explicit format overrides the platform default.
        with mock.patch('bfg9000.builtins.packages.which', mock_which):
            self.assertEqual(
                packages.system_executable(self.env, 'name', 'format'),
                file_types.Executable(abspath('/command'), 'format')
            )
| 39.208889 | 78 | 0.596803 | 7,029 | 0.796758 | 1,473 | 0.166969 | 0 | 0 | 0 | 0 | 1,719 | 0.194854 |
9219da371789bf95d6669c85fa9c102d7ff9fec6 | 1,459 | py | Python | pypyr/steps/nowutc.py | FooBarQuaxx/pypyr | ebe56b2200a53e2f38c78bbb42d466bb1556c37c | [
"Apache-2.0"
] | null | null | null | pypyr/steps/nowutc.py | FooBarQuaxx/pypyr | ebe56b2200a53e2f38c78bbb42d466bb1556c37c | [
"Apache-2.0"
] | null | null | null | pypyr/steps/nowutc.py | FooBarQuaxx/pypyr | ebe56b2200a53e2f38c78bbb42d466bb1556c37c | [
"Apache-2.0"
] | null | null | null | """pypyr step saves the current utc datetime to context."""
from datetime import datetime, timezone
import logging
# logger means the log level will be set correctly
logger = logging.getLogger(__name__)
def run_step(context):
    """Save the current UTC datetime to context under 'nowUtc'.

    Args:
        context: pypyr.context.Context. Mandatory.
            Optional key 'nowUtcIn' holds a strftime formatting
            expression (see the Python datetime strftime/strptime
            reference). All inputs support pypyr formatting expressions.

    If no format is given, the timestamp defaults to ISO8601:
    YYYY-MM-DDTHH:MM:SS.ffffff+00:00, or YYYY-MM-DDTHH:MM:SS when the
    microsecond component is 0.

    Returns:
        None. Updates the context arg in place.
    """
    logger.debug("started")
    now = datetime.now(timezone.utc)
    raw_format = context.get('nowUtcIn', None)
    if raw_format:
        # Resolve any pypyr {formatting} expressions before strftime.
        resolved_format = context.get_formatted_string(raw_format)
        context['nowUtc'] = now.strftime(resolved_format)
    else:
        context['nowUtc'] = now.isoformat()
    logger.info("timestamp %s saved to context nowUtc", context['nowUtc'])
    logger.debug("done")
| 32.422222 | 98 | 0.686086 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 955 | 0.654558 |
9219f5cfa01beb284c389cfb42e21e0246c37a82 | 1,063 | py | Python | datumaro/datumaro/util/test_utils.py | godlikejay/cvat | 50c40ba70dd8f890478068f29ce3de1057ec848c | [
"MIT"
] | 2 | 2020-01-10T08:50:50.000Z | 2020-01-23T06:11:11.000Z | datumaro/datumaro/util/test_utils.py | godlikejay/cvat | 50c40ba70dd8f890478068f29ce3de1057ec848c | [
"MIT"
] | 5 | 2022-02-13T20:46:25.000Z | 2022-02-27T10:34:53.000Z | datumaro/datumaro/util/test_utils.py | godlikejay/cvat | 50c40ba70dd8f890478068f29ce3de1057ec848c | [
"MIT"
] | 1 | 2020-06-26T00:27:43.000Z | 2020-06-26T00:27:43.000Z |
# Copyright (C) 2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
import inspect
import os
import os.path as osp
import shutil
def current_function_name(depth=1):
return inspect.getouterframes(inspect.currentframe())[depth].function
class FileRemover:
def __init__(self, path, is_dir=False, ignore_errors=False):
self.path = path
self.is_dir = is_dir
self.ignore_errors = ignore_errors
def __enter__(self):
return self
# pylint: disable=redefined-builtin
def __exit__(self, type=None, value=None, traceback=None):
if self.is_dir:
shutil.rmtree(self.path, ignore_errors=self.ignore_errors)
else:
os.remove(self.path)
# pylint: enable=redefined-builtin
class TestDir(FileRemover):
def __init__(self, path=None, ignore_errors=False):
if path is None:
path = osp.abspath('temp_%s' % current_function_name(2))
os.makedirs(path, exist_ok=ignore_errors)
super().__init__(path, is_dir=True, ignore_errors=ignore_errors) | 27.25641 | 73 | 0.687676 | 813 | 0.764817 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.138288 |
921abd69963e5dab6e9a4498bebdc2bdb12a2b92 | 1,480 | py | Python | module3-nosql-and-document-oriented-databases/mongo_queries.py | ayarelif/DS-Unit-3-Sprint-2-SQL-and-Databases | fbd145f5dfbaa1e8b68e7369b593a4eee2c5ae5f | [
"MIT"
] | null | null | null | module3-nosql-and-document-oriented-databases/mongo_queries.py | ayarelif/DS-Unit-3-Sprint-2-SQL-and-Databases | fbd145f5dfbaa1e8b68e7369b593a4eee2c5ae5f | [
"MIT"
] | null | null | null | module3-nosql-and-document-oriented-databases/mongo_queries.py | ayarelif/DS-Unit-3-Sprint-2-SQL-and-Databases | fbd145f5dfbaa1e8b68e7369b593a4eee2c5ae5f | [
"MIT"
] | null | null | null |
# Example Atlas URI kept for reference:
# BG_URI="mongodb+srv://elifayar:<password>@clusters.lcjcx.mongodb.net/<dbname>?retryWrites=true&w=majority")
# client = pymongo.MongoClient(DB_URI)
# db = client.test
from pymongo import MongoClient
import os
from dotenv import load_dotenv
# Pull MONGO_* credentials from a local .env file into the environment.
load_dotenv()
DB_USER = os.getenv("MONGO_USER", default="OOPS")
DB_PASSWORD = os.getenv("MONGO_PASSWORD", default="OOPS")
CLUSTER_NAME = os.getenv("MONGO_CLUSTER_NAME", default="OOPS")
# Build the Atlas SRV connection string from the env values.
connection_uri = f"mongodb+srv://{DB_USER}:{DB_PASSWORD}@{CLUSTER_NAME}.mongodb.net/test?retryWrites=true&w=majority"
print("----------------")
print("URI:", connection_uri)
# NOTE: constructing the client connects lazily; network errors surface
# on the first real operation below.
client =MongoClient(connection_uri)
print("----------------")
print("CLIENT:", type(client), client)
print("DATABASES", client.list_database_names())
db = client.my_test_database # "test_database" or whatever you want to call it
print("----------------")
print("DB:", type(db), db)
collection = db.pokemon_test # "pokemon_test" or whatever you want to call it
print("----------------")
print("COLLECTION:", type(collection), collection)
print("DOCUMENTS COUNT:", collection.count_documents({}))
# Insert one sample document (runs on every execution, so the count grows).
collection.insert_one({
    "name": "Pikachu",
    "level": 30,
    "exp": 76000000000,
    "hp": 400,
    "parents": ['Pikachu',"Raichu"],
    "other_attr": {
        "a":1,
        "b":2,
        "c":3
    }
})
print("DOCS:", collection.count_documents({}))
print(collection.count_documents({"name": "Pikachu"}))
#from pprint import pprint
#breakpoint()
#dir(collection)
921b8b35d970a247993f6069df7c4e369c0dbd62 | 2,148 | py | Python | Video_To_ASCII/main.py | cppshizoidS/Python | dfde647ba4a6fb828ef9a564924416bebd875929 | [
"MIT"
] | 5 | 2022-03-12T02:44:41.000Z | 2022-03-24T10:33:28.000Z | Video_To_ASCII/main.py | cppshizoidS/Python | dfde647ba4a6fb828ef9a564924416bebd875929 | [
"MIT"
] | 1 | 2022-03-16T09:19:21.000Z | 2022-03-16T09:19:21.000Z | Video_To_ASCII/main.py | cppshizoidS/Python | dfde647ba4a6fb828ef9a564924416bebd875929 | [
"MIT"
] | null | null | null | import os
import sys
from time import sleep as sleep
import glob
import cv2
from PIL import Image
# ANSI escape prefixes used for cursor positioning.
ESC = b'\033'
CSI = ESC + b'['
# Fixed: this flag was previously spelled with a stray Cyrillic 'Ф'
# prefix ("Фuse_ansi_escape_sequences"), so the reference on the next
# line raised NameError at import time.
use_ansi_escape_sequences = True
if not use_ansi_escape_sequences:
    # Fall back to the Win32 console API for cursor positioning.
    import ctypes
    from ctypes import c_long
    console_handle = ctypes.windll.kernel32.GetStdHandle(c_long(-11))
# Character resolution of the rendered ASCII video.
video_columns, video_lines = 140, 70
has_inverted_colors = True
def set_console_size(columns, lines):
    """Resize the Windows console to the given character grid and clear it."""
    resize_command = f'mode con cols={columns} lines={lines} '
    os.system(resize_command)
    os.system('cls')
set_console_size(40, 20)
selected_video_number = 0
# List every .mp4 in the working directory and present a numbered menu.
videos = glob.glob('*.mp4')
for video_index, video_name in enumerate(videos):
    print(f'[{video_index + 1}] - {video_name}')
selected_video_number = input('\nВведите номер видео: ')
try:
    # Menu is 1-based; anything non-numeric or out of range lands here.
    selected_video = videos[int(selected_video_number) - 1]
except:
    set_console_size(100, 30)
    print(f'{selected_video_number} - неверный номер X_X')
    exit()
vidcap = cv2.VideoCapture(selected_video)
success, image = vidcap.read()
set_console_size(video_columns, video_lines)
# Brightness ramp: darkest glyph first; reversed when colors are inverted.
symbols = list(r'$@B%8&WM#*oahkbdpqwmZO0QLCJUYXzcvunxrjft/\|()1[]?-_+~<>i!lI;:, ')
if has_inverted_colors:
    symbols.reverse()
# Unbuffered-ish binary stdout sized to roughly one frame of output.
stdout = os.fdopen(sys.stdout.fileno(), 'wb', video_columns * video_lines * 2)
try:
    while True:
        success, image = vidcap.read()
        if not success:
            # End of video: restore a sane console size and stop.
            set_console_size(100, 30)
            break
        # Downscale to the console grid and convert to grayscale.
        im = Image.fromarray(image)
        im = im.resize((video_columns, video_lines))
        im = im.convert('L')
        pixels = im.load()
        result = []
        for y in range(1, video_lines):
            for x in range(1, video_columns):
                # Map the 0-255 luminance into the symbol ramp (36 ≈ 256/len).
                result.append(symbols[int(pixels[x, y] / 36) - 1])
            result.append('\n')
        # Set cursor to the top left corner
        if use_ansi_escape_sequences:
            stdout.write(CSI + b'1;1H')
        else:
            ctypes.windll.kernel32.SetConsoleCursorPosition(console_handle, 0)
        stdout.write(''.join(result).encode())
        stdout.flush()
        sleep(1 / 60) # Sleep one sixtieth of a second (60 fps)
except KeyboardInterrupt:
    set_console_size(100, 30)
921b98b759492f2fca11edef2d3a5faa5d0dc853 | 1,797 | py | Python | Scripts/simulation/careers/acting/performance_object_data.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/careers/acting/performance_object_data.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/careers/acting/performance_object_data.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\careers\acting\performance_object_data.py
# Compiled at: 2018-09-18 00:30:33
# Size of source mod 2**32: 2272 bytes
import services
class PerformanceObjectData:
    """Holds a set of stateful objects plus the state values to apply to
    them before, during, and after a performance."""

    def __init__(self, objects, pre_performance_states, performance_states, post_performance_states):
        self._objects = objects
        self._pre_performance_states = pre_performance_states
        self._performance_states = performance_states
        self._post_performance_states = post_performance_states

    def set_performance_states(self):
        """Apply the during-performance state values to every object."""
        self._set_states(self._performance_states)

    def set_pre_performance_states(self):
        """Apply pre-performance states, skipping any entry whose
        skip_with_perk the active Sim has already unlocked."""
        bucks_tracker = services.active_sim_info().get_bucks_tracker()
        for state_data in self._pre_performance_states:
            perk = state_data.skip_with_perk
            value = state_data.state_value
            if perk is not None and bucks_tracker is not None and bucks_tracker.is_perk_unlocked(perk):
                continue
            for target in self._objects:
                if target.has_state(value.state):
                    target.set_state(value.state, value, immediate=True, force_update=True)

    def set_post_performance_states(self):
        """Apply the post-performance state values to every object."""
        self._set_states(self._post_performance_states)

    def _set_states(self, states):
        # Shared applicator: push each value onto every object supporting it.
        for value in states:
            for target in self._objects:
                if target.has_state(value.state):
                    target.set_state(value.state, value, immediate=True, force_update=True)
921b9afc0758915d77df27dde1108c6a2abd065e | 670 | py | Python | tests/extractor/test_with_fallback.py | thegangtechnology/excel_comment_orm | b38156b406ccb3ce87737b8ed049bbf3b8a39050 | [
"MIT"
] | 2 | 2020-11-10T04:53:07.000Z | 2020-11-12T03:53:46.000Z | tests/extractor/test_with_fallback.py | thegangtechnology/excel_comment_orm | b38156b406ccb3ce87737b8ed049bbf3b8a39050 | [
"MIT"
] | 50 | 2020-11-09T06:30:31.000Z | 2022-01-06T05:00:50.000Z | tests/extractor/test_with_fallback.py | thegangtechnology/excel_comment_orm | b38156b406ccb3ce87737b8ed049bbf3b8a39050 | [
"MIT"
] | null | null | null | from openpyxl import Workbook
from openpyxl.comments import Comment
from exco import util, ExcoTemplate, ExcelProcessorFactory
def test_with_defaults():
    """A non-numeric cell with an int parser must fail parsing and fall
    back to the declared fallback value (999)."""
    workbook = Workbook()
    active_sheet = workbook.active
    target_cell = active_sheet.cell(1, 1)
    target_cell.value = 'not a number'
    exco_markup = util.long_string("""
    {{--
    key: key
    parser: int
    fallback: 999
    --}}
    """)
    target_cell.comment = Comment(exco_markup, 'author')
    spec = ExcoTemplate.from_workbook(workbook).to_raw_excel_processor_spec()
    print(spec)
    processor = ExcelProcessorFactory.default().create_from_spec(spec)
    result = processor.process_workbook(workbook)
    assert not result.is_ok
    assert result.to_dict() == {'key': 999}
| 25.769231 | 71 | 0.656716 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 123 | 0.183582 |
921be83bfe8f9a489861abaead3be210cb7d7e10 | 9,107 | py | Python | python/miind/connections.py | dekamps/miind | 4b321c62c2bd27eb0d5d8336a16a9e840ba63856 | [
"MIT"
] | 13 | 2015-09-15T17:28:25.000Z | 2022-03-22T20:26:47.000Z | python/miind/connections.py | dekamps/miind | 4b321c62c2bd27eb0d5d8336a16a9e840ba63856 | [
"MIT"
] | 41 | 2015-08-25T07:50:55.000Z | 2022-03-21T16:20:37.000Z | python/miind/connections.py | dekamps/miind | 4b321c62c2bd27eb0d5d8336a16a9e840ba63856 | [
"MIT"
] | 9 | 2015-09-14T20:52:07.000Z | 2022-03-08T12:18:18.000Z | # -*- coding: utf-8 -*-
"""
Created on Sat Jan 17 16:51:56 2015
@author: scsmdk
"""
import miind.nodes as nodes
import miind.variables as variables
# Running count of how many connections share the same (In, Out) pair.
TALLY = {}
def register(i,o):
    """Record one more connection between nodes i and o and return its
    zero-based occurrence index (0 for the first such connection)."""
    key = (i, o)
    TALLY[key] = TALLY.get(key, 0) + 1
    return TALLY[key] - 1
def parse_connection(connection, weighttype):
    """Translate one <Connection> XML element into the C++ statements that
    declare the connection object and register it with the network.

    Supports three weight types: DelayedConnection (text holds weight,
    delay, and a third parameter), CustomConnectionParameters (arbitrary
    attributes become setParam calls), and plain double (text is the
    weight). Repeated connections between the same node pair get a
    numeric suffix via register() so the C++ variable names stay unique.
    """
    i = str(nodes.NODE_NAMES[connection.attrib['In']])
    o = str(nodes.NODE_NAMES[connection.attrib['Out']])
    # Multiple connections with same label are allowed, so we need to keep a tally
    count = register(i,o)
    tally = '_' + str(count)
    s = ''
    if weighttype.text == 'DelayedConnection':
        s += '\tDelayedConnection con_' + i + '_' + o + tally + '('
        s += connection.text.split()[0] + ','
        s += connection.text.split()[1] + ','
        s += connection.text.split()[2] + ');\n'
    elif weighttype.text == "CustomConnectionParameters":
        s += '\tCustomConnectionParameters con_' + i + '_' + o + tally + ';\n'
        for ak,av in connection.attrib.items():
            # In/Out identify the endpoints; everything else is a parameter.
            if ak == 'In' or ak == 'Out':
                continue
            s += '\tcon_' + i + '_' + o + tally + '.setParam(\"' + ak + '\", std::to_string(' + av +'));\n'
    else:
        if weighttype.text == 'double':
            s += '\tdouble con_' + i + '_' + o + tally + '('
            s += connection.text + ');\n'
    # Finally wire the declared connection into the network.
    s += '\tnetwork.makeFirstInputOfSecond('
    s += 'id_' + i + ','
    s += 'id_' + o + ','
    s += 'con_' + i + '_' + o + tally + ');\n'
    return s
def parse_external_outgoing_connection(connection, nodemap, network_name='network',looped_definition=False):
    """Emit a C++ addExternalMonitor call for the node named in the
    connection's 'Node' attribute."""
    node_index = str(nodemap[connection.attrib['Node']])
    if looped_definition:
        # Inside a population loop the node id is offset by loop index i.
        node_index = '{}*i+{}'.format(len(nodemap), node_index)
    return '\t\t\t{}.addExternalMonitor({});\n'.format(network_name, node_index)
def parse_grid_connection(connection, nodemap, network_name='network',looped_definition=False):
    """Emit a C++ addGridConnection call between the 'In' and 'Out' nodes
    using the efficacy/num_connections/delay attributes."""
    src = str(nodemap[connection.attrib['In']])
    dst = str(nodemap[connection.attrib['Out']])
    if looped_definition:
        # Offset both node ids by the population-loop index i.
        offset = str(len(nodemap)) + '*i+'
        src, dst = offset + src, offset + dst
    attrs = connection.attrib
    call_args = ','.join([src, dst, attrs['efficacy'],
                          attrs['num_connections'], attrs['delay']])
    return '\t\t\t' + network_name + '.addGridConnection(' + call_args + ');\n'
def parse_external_incoming_grid_connection(connection, nodemap, id, network_name='network',looped_definition=False):
    """Emit addGridConnection from external input `id` onto the node
    named in the 'Node' attribute; delay is cast to double."""
    target = str(nodemap[connection.attrib['Node']])
    if looped_definition:
        target = str(len(nodemap)) + '*i+' + target
    attrs = connection.attrib
    return ('\t\t\t' + network_name + '.addGridConnection(' + target + ',' +
            attrs['efficacy'] + ',' + attrs['num_connections'] +
            ',(double)' + attrs['delay'] + ',' + str(id) + ');\n')
def parse_grid_vectorized_connection(connection, nodemap, network_name='network',looped_definition=False):
    """Emit C++ that fills a std::map of connection parameters and passes
    it to addGridConnection (vectorized back end)."""
    node_i = str(nodemap[connection.attrib['In']])
    node_o = str(nodemap[connection.attrib['Out']])
    params_name = 'params_' + node_i + '_' + node_o
    lines = ['\t\t\tstd::map<std::string, std::string> ' + params_name + ';\n']
    for key, value in connection.attrib.items():
        # In/Out identify the endpoints; everything else is a parameter.
        if key not in ('In', 'Out'):
            lines.append('\t\t\t' + params_name + '["' + key +
                         '"] = std::to_string(' + value + ');\n')
    if looped_definition:
        prefix = str(len(nodemap)) + '*i+'
        src, dst = prefix + node_i, prefix + node_o
    else:
        src, dst = node_i, node_o
    lines.append('\t\t\t' + network_name + '.addGridConnection(' + src + ',' +
                 dst + ', ' + params_name + ');\n')
    return ''.join(lines)
def parse_external_incoming_grid_vectorized_connection(connection, nodemap, id, network_name='network',looped_definition=False):
    """Emit C++ that fills a parameter map and registers an external
    input grid connection onto the 'Node' target (vectorized back end)."""
    node_o = str(nodemap[connection.attrib['Node']])
    params_name = 'params_extern_' + node_o
    lines = ['\t\t\tstd::map<std::string, std::string> ' + params_name + ';\n']
    for key, value in connection.attrib.items():
        if key != 'Node':
            lines.append('\t\t\t' + params_name + '["' + key +
                         '"] = std::to_string(' + value + ');\n')
    target = str(len(nodemap)) + '*i+' + node_o if looped_definition else node_o
    lines.append('\t\t\t' + network_name + '.addGridConnection(' + target +
                 ', ' + params_name + ',' + str(id) + ');\n')
    return ''.join(lines)
def parse_mesh_connection(connection, nodemap, mat_name, network_name='network',looped_definition=False):
    """Emit addMeshConnection; connection.text holds whitespace-separated
    'num_connections efficacy delay'."""
    src = str(nodemap[connection.attrib['In']])
    dst = str(nodemap[connection.attrib['Out']])
    if looped_definition:
        prefix = str(len(nodemap)) + '*i+'
        src, dst = prefix + src, prefix + dst
    tokens = connection.text.split()
    num_cons, eff, delay = tokens[0], tokens[1], tokens[2]
    return ('\t\t\t' + network_name + '.addMeshConnection(' + src + ',' + dst +
            ',' + eff + ',' + num_cons + ',' + delay + ',&' + mat_name + ');\n')
def parse_external_incoming_mesh_connection(connection, nodemap, mat_name, id, network_name='network',looped_definition=False):
    """Emit addMeshConnection from external input `id` onto the 'Node'
    target; connection.text holds 'num_connections efficacy delay'."""
    target = str(nodemap[connection.attrib['Node']])
    if looped_definition:
        target = str(len(nodemap)) + '*i+' + target
    tokens = connection.text.split()
    num_cons, eff, delay = tokens[0], tokens[1], tokens[2]
    return ('\t\t\t' + network_name + '.addMeshConnection(' + target + ',' + eff +
            ',' + num_cons + ',(double)' + delay + ',&' + mat_name + ',' +
            str(id) + ');\n')
def parse_mesh_vectorized_connection(connection, nodemap, mat_name, network_name='network',looped_definition=False):
    """Emit C++ that fills a std::map of parameters and passes it to
    addMeshCustomConnection together with the transition matrix."""
    node_i = str(nodemap[connection.attrib['In']])
    node_o = str(nodemap[connection.attrib['Out']])
    params_name = 'params_' + node_i + '_' + node_o
    if looped_definition:
        prefix = str(len(nodemap)) + '*i+'
        src, dst = prefix + node_i, prefix + node_o
    else:
        src, dst = node_i, node_o
    lines = ['\t\t\tstd::map<std::string, std::string> ' + params_name + ';\n']
    for key, value in connection.attrib.items():
        if key not in ('In', 'Out'):
            lines.append('\t\t\t' + params_name + '["' + key +
                         '"] = std::to_string(' + value + ');\n')
    lines.append('\t\t\t' + network_name + '.addMeshCustomConnection(' + src +
                 ',' + dst + ', ' + params_name + ',&' + mat_name + ');\n')
    return ''.join(lines)
def parse_external_incoming_mesh_vectorized_connection(connection, nodemap, mat_name, id, network_name='network',looped_definition=False):
    """Emit C++ registering a custom mesh connection from an external node
    (`id`); parameters are copied verbatim as quoted string literals."""
    target = str(nodemap[connection.attrib['Node']])
    params_name = 'params_extern_' + target
    lines = ['\t\t\tstd::map<std::string, std::string> ' + params_name + ';\n']
    for key, value in connection.attrib.items():
        if key in ('Node',):
            continue
        # Unlike the internal variant, values become quoted string
        # literals rather than std::to_string() expressions.
        lines.append('\t\t\t' + params_name + '["' + key + '"] = "' + value + '";\n')
    external_id = str(id)
    if looped_definition:
        target_idx = str(len(nodemap)) + '*i+' + target
    else:
        target_idx = target
    lines.append('\t\t\t' + network_name + '.addMeshCustomConnection(' + target_idx + ', ' + params_name +
                 ',&' + mat_name + ',' + external_id + ');\n')
    return ''.join(lines)
def parse_connections(connection_list,weighttype,outfile):
    """Write the generated code snippet for every connection to `outfile`."""
    for conn in connection_list:
        outfile.write(parse_connection(conn, weighttype))
def parse_incoming_connections(connection_list,weighttype,outfile):
    """Write the generated snippet for every incoming (external) connection."""
    for conn in connection_list:
        outfile.write(parse_incoming_connection(conn, weighttype))
def parse_outgoing_connections(connection_list,outfile):
    """Write the generated snippet for every outgoing (external) connection."""
    for conn in connection_list:
        outfile.write(parse_outgoing_connection(conn))
def parse_incoming_connection(connection, weighttype):
    """Generate C++ declaring a connection object from the external input
    and registering it as a precursor of the target node.

    Multiple connections with the same label are allowed, so register()
    maintains a running tally that suffixes the variable name.
    """
    node = str(nodes.NODE_NAMES[connection.attrib['Node']])
    con_name = 'con_EXTERNAL_' + node + '_' + str(register('EXTERNAL', node))
    parts = []
    wt = weighttype.text
    if wt == 'DelayedConnection':
        fields = connection.text.split()
        parts.append('\tDelayedConnection ' + con_name + '(')
        parts.append(fields[0] + ',')
        parts.append(fields[1] + ',')
        parts.append(fields[2] + ');\n')
    elif wt == "CustomConnectionParameters":
        parts.append('\tCustomConnectionParameters ' + con_name + ';\n')
        for key, value in connection.attrib.items():
            if key == 'Node':
                continue
            parts.append('\t' + con_name + '.setParam("' + key + '", std::to_string(' + value + '));\n')
    elif wt == 'double':
        # Plain scalar weight type.
        parts.append('\tdouble ' + con_name + '(')
        parts.append(connection.text + ');\n')
    parts.append('\t\t\tnetwork.setNodeExternalPrecursor(')
    parts.append('id_' + node + ',')
    parts.append(con_name + ');\n')
    return ''.join(parts)
def parse_outgoing_connection(connection):
    """Generate C++ registering the connection's node as an external successor.

    register() is still invoked so the shared connection tally stays in
    sync, even though the count does not appear in the emitted code.
    """
    node = str(nodes.NODE_NAMES[connection.attrib['Node']])
    register(node, 'EXTERNAL')
    return ('\t\t\tnetwork.setNodeExternalSuccessor(' +
            'id_' + node + ');\n')
| 37.788382 | 138 | 0.595037 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,282 | 0.250576 |
921c258fc50f1fef81ae0ad9d10a9a140a341776 | 15,129 | py | Python | PGM_MLRN_supplementary/train_mixed_precision_distributed.py | MaryZolfaghar/AbstractReasoning | d5f39e447361353eb0f3d77c88d9208942526c0b | [
"Apache-2.0"
] | null | null | null | PGM_MLRN_supplementary/train_mixed_precision_distributed.py | MaryZolfaghar/AbstractReasoning | d5f39e447361353eb0f3d77c88d9208942526c0b | [
"Apache-2.0"
] | null | null | null | PGM_MLRN_supplementary/train_mixed_precision_distributed.py | MaryZolfaghar/AbstractReasoning | d5f39e447361353eb0f3d77c88d9208942526c0b | [
"Apache-2.0"
] | null | null | null | import numpy as np
import torch
import os
import sys
import re
import math
from torch.utils.data import Dataset, DataLoader
from apex import amp
from apex.parallel import DistributedDataParallel as DDP
from lamb import Lamb
#tensorboard for accuracy graphs
import tensorflow as tf
def getCombinations(inputTensor, N, c, d):
    """Build all ordered object pairs: out[n, i, j] = cat(obj_i, obj_j).

    `inputTensor` has shape (N, c, d); the result has shape (N, c, c, 2*d).
    """
    expanded = inputTensor.reshape(N, 1, c, d).expand(N, c, c, d)  # [n, i, j] -> obj_j
    swapped = expanded.transpose(1, 2)                             # [n, i, j] -> obj_i
    return torch.cat((swapped, expanded), 3)
# Which PGM generalisation regime to train on; the commented-out names
# are the other available splits.
dataset_name = 'neutral'#'interpolation'#'extrapolation'
# The single required CLI argument is the dataset root directory.
if len(sys.argv) < 2:
    print("Missing data path!")
    exit()
# Preprocessed .npz samples live in a '<regime>_preprocessed' subfolder.
datapath_preprocessed = os.path.join(sys.argv[1], dataset_name + '_preprocessed')
class PgmDataset(Dataset):
    """Map-style dataset over preprocessed PGM puzzle files (.npz).

    Each file stores a uint8 'image' array reshaped to 16 panels of
    80x80 pixels (8 context panels plus 8 answer candidates) and an
    integer 'target' index of the correct candidate.
    """
    def __init__(self, filenames):
        """Keep the list of .npz filenames (relative to datapath_preprocessed)."""
        self.filenames = filenames
    def __len__(self):
        # One sample per file.
        return len(self.filenames)
    def __getitem__(self, index):
        """Load and return (image, target) for the puzzle at `index`."""
        filename = self.filenames[index]
        with np.load(os.path.join(datapath_preprocessed, filename)) as data:
            image = data['image'].astype(np.uint8).reshape(16, 80, 80)
            target = data['target']
        return image, target
class WReN(torch.nn.Module):
    """Wild Relation Network variant with a multi-layer relation core (MLRN).

    forward() scores each of the 8 candidate answer panels against the 8
    context panels of a PGM puzzle and returns pre-softmax scores of
    shape (batch, 8) plus an auxiliary activation-magnitude penalty.
    """
    def __init__(self, m):
        """Build the model; `m` is the number of stacked relation layers (self.h)."""
        super(WReN, self).__init__()
        self.relation_network_depth = m
        # Hidden widths of the g (pair), h (relation) and f (readout) MLPs.
        self.g_dim = 512
        self.h_dim = 256
        self.f_dim = 256
        self.use_mag_enc = True #switch between scalar input and magnitude encoded input
        self.mag_enc_type_relu = False #switch between gaussian magnitude encoding and relu based magnitude encoding
        self.magnitude_encoding_dim = 20
        #model
        #magnitude encoding
        # Pixels in [0,255] are mapped to [-1,1] before encoding.
        self.input_scale = 2.0/255.0
        self.input_offset = -1.0
        std_dev = 0.28
        # Precomputed constants of the Gaussian bump used per encoding channel.
        self.input_encoding_variance_inv = 1.0 / (math.sqrt(2.0) * std_dev)
        self.normalization_factor = 1.0 / (math.sqrt(2*math.pi) * std_dev)
        # Fixed (non-trainable) grid of encoding centres spanning [-1, 1].
        self.mag_scale = torch.nn.Parameter(torch.linspace(-1.0, 1.0, steps=self.magnitude_encoding_dim), requires_grad=False)
        if self.use_mag_enc:
            conv_input_dim = self.magnitude_encoding_dim
        else:
            conv_input_dim = 1
        # Panel preprocessor: four stride-2 convs take 80x80 down to 4x4.
        self.conv = torch.nn.Sequential(
            torch.nn.Conv2d(conv_input_dim, 32, 3, stride=2),
            torch.nn.LeakyReLU(),
            torch.nn.Conv2d(32, 32, 3, stride=2),
            torch.nn.LeakyReLU(),
            torch.nn.Conv2d(32, 32, 3, stride=2),
            torch.nn.LeakyReLU(),
            torch.nn.Conv2d(32, 32, 3, stride=2),
            torch.nn.LeakyReLU()
        )
        # Project the flattened conv features; 9 dims are reserved for the
        # one-hot panel-position tags appended in forward().
        self.post_cnn_linear = torch.nn.Linear(32*4*4, 256-9)
        # One-hot position tags for the 9 panels, repeated for all 8 options.
        self.tag_matrix = torch.nn.Parameter(torch.eye(9).repeat(8, 1), requires_grad=False)
        # g: MLP applied to every ordered pair of panel embeddings.
        self.g = torch.nn.Sequential(
            torch.nn.Linear(2*256, self.g_dim),
            torch.nn.LeakyReLU(),
            torch.nn.Linear(self.g_dim, self.g_dim),
            torch.nn.LeakyReLU(),
            torch.nn.Linear(self.g_dim, self.g_dim),
            torch.nn.LeakyReLU(),
            torch.nn.Linear(self.g_dim, self.h_dim),
            torch.nn.LeakyReLU()
        )
        # h: m further relation layers, each re-pairing the aggregated objects.
        h = []
        for i in range(m):
            rel_layer_func = torch.nn.Sequential(
                torch.nn.Linear(2*self.h_dim, self.h_dim),
                torch.nn.LeakyReLU(),
                torch.nn.Linear(self.h_dim, self.h_dim),
                torch.nn.LeakyReLU(),
                torch.nn.Linear(self.h_dim, self.h_dim),
                torch.nn.LeakyReLU()
            )
            h.append(rel_layer_func)
        self.h = torch.nn.ModuleList(h)
        # f: readout MLP producing one scalar score per candidate.
        f_in_dim = self.h_dim
        self.f = torch.nn.Sequential(
            torch.nn.Linear(f_in_dim, self.f_dim),
            torch.nn.LeakyReLU(),
            torch.nn.Linear(self.f_dim, self.f_dim),
            torch.nn.LeakyReLU()
        )
        self.f_final = torch.nn.Linear(self.f_dim, 1)
    def forward(self, batch):
        """Score candidates for a (batch, 16, 80, 80) float tensor of panels.

        Returns (answer, activation_loss) where answer is (batch, 8)
        pre-softmax scores and activation_loss penalises large activations.
        """
        batch_size = batch.size()[0]
        #Panel preprocessor CNN
        batch_flat = batch.reshape(batch_size*16, 1, 80, 80)
        if self.use_mag_enc:
            with torch.no_grad():
                #magnitude encoding
                # Move the channel axis last so broadcasting against
                # mag_scale produces one channel per encoding centre.
                batch_flat = batch_flat.transpose(1, 3)
                if self.mag_enc_type_relu:
                    #first order
                    batch_flat = batch_flat.add_(255/self.magnitude_encoding_dim)
                    batch_flat = torch.nn.functional.relu_(batch_flat.mul_(self.input_scale).add_(self.input_offset).add(-self.mag_scale))
                    #second order
                    batch_flat = torch.cat((batch_flat[:, :, :, :-1] - 2*batch_flat[:, :, :, 1:], batch_flat[:, :, :, -1].unsqueeze(dim=-1)), dim=-1).mul_(self.magnitude_encoding_dim/2)
                    batch_flat = torch.nn.functional.relu_(batch_flat)
                else:
                    # Gaussian bump encoding around each centre in mag_scale.
                    batch_flat = batch_flat.mul_(self.input_scale).add_(self.input_offset).tanh_().add(self.mag_scale).mul_(self.input_encoding_variance_inv).pow_(2).mul_(-1).exp_().mul_(self.normalization_factor)
                batch_flat = batch_flat.transpose(3, 1)
        conv_out = self.conv(batch_flat)
        #scatter context
        objectsWithoutPos = self.post_cnn_linear(conv_out.reshape(batch_size*16, -1))
        panel_vectors = objectsWithoutPos.reshape(batch_size, 16, 256-9)
        # Split into the 8 shared context panels and the 8 answer options.
        given, option1, option2, option3, option4, option5, option6, option7, option8 = panel_vectors.split((8, 1, 1, 1, 1, 1, 1, 1, 1), dim=1)
        # Pair every option with the full context: 8 groups of 9 panels.
        optionsWithContext = torch.cat((
            given, option1,
            given, option2,
            given, option3,
            given, option4,
            given, option5,
            given, option6,
            given, option7,
            given, option8
        ), 1)
        optionsWithoutPos = optionsWithContext.reshape(batch_size*8*9, 256-9)
        # Append the one-hot panel-position tags to reach 256 dims.
        objects = torch.cat((optionsWithoutPos, self.tag_matrix.repeat(batch_size, 1)), dim=1).reshape(batch_size*8, 9, 256-9+9)
        #MLRN
        objPairs2D = getCombinations(objects, batch_size*8, 9, 256)
        objPairs = objPairs2D.reshape(batch_size*8*(9*9), 2*256)
        gResult = self.g(objPairs)#apply MLP
        prev_result = gResult
        prev_dim = self.h_dim
        prev_result_2d = prev_result.reshape(batch_size*8, 9, 9, prev_dim)
        # Aggregate over the second pair member to get 9 summary objects.
        sum_j = prev_result_2d.sum(dim=2)
        for i, h_layer in enumerate(self.h):
            # NOTE(review): `residual` is assigned but never used -- looks
            # like an abandoned residual connection; confirm before removing.
            residual = sum_j
            intermed_obj_pairs_2d = getCombinations(sum_j, batch_size*8, 9, prev_dim)
            intermed_obj_pairs = intermed_obj_pairs_2d.reshape(batch_size*8*(9*9), 2*prev_dim)
            prev_result = h_layer(intermed_obj_pairs)#apply MLP
            prev_dim = self.h_dim
            prev_result_2d = prev_result.reshape(batch_size*8, 9, 9, prev_dim)
            sum_j = prev_result_2d.sum(dim=2)
        hSum = sum_j.sum(dim=1)
        result = self.f_final(self.f(hSum))#pre-softmax scores for every possible answer
        answer = result.reshape(batch_size, 8)
        #attempt to stabilize training (avoiding inf value activations in last layers)
        activation_loss = hSum.pow(2).mean() + result.pow(2).mean()
        return answer, activation_loss
def worker_fn(rank, world_size):
    """Per-GPU training worker: builds the model, then alternates a full
    validation pass and a training epoch for a fixed number of epochs.

    Spawned once per GPU by run(); `rank` selects the CUDA device and only
    rank 0 prints summaries, writes TensorBoard logs and saves weights.
    """
    setup(rank, world_size)
    weights_filename = "weights.pt"
    batch_size = 512
    epochs = 240
    warmup_epochs = 8
    use_mixed_precision = True
    batch_size = batch_size // world_size #batch size per worker
    #Data
    all_data = os.listdir(datapath_preprocessed)
    # Split train/val by filename pattern of the preprocessed samples.
    train_filenames = [p for p in all_data if re.match(r'^PGM_' + re.escape(dataset_name) + r'_train_(\d+)\.npz$', p) is not None]
    val_filenames = [p for p in all_data if re.match(r'^PGM_' + re.escape(dataset_name) + r'_val_(\d+)\.npz$', p) is not None]
    train_dataset = PgmDataset(train_filenames)
    train_sampler = torch.utils.data.distributed.DistributedSampler(train_dataset)
    train_dataloader = DataLoader(train_dataset, batch_size=batch_size, num_workers=8, pin_memory=False, sampler=train_sampler)#shuffle is done by the sampler
    val_dataloader = DataLoader(PgmDataset(val_filenames), batch_size=batch_size, shuffle=False, num_workers=4, pin_memory=False)
    #Model
    device_ids = [rank]
    model = WReN(2).to(device_ids[0])#3-layer MLRN
    # Resume from an existing checkpoint if one is present; a cold start
    # additionally enables LR warmup below.
    if weights_filename is not None and os.path.isfile("./" + weights_filename):
        model.load_state_dict(torch.load(weights_filename, map_location='cpu'))
        print('Weights loaded')
        cold_start = False
    else:
        print('No weights found')
        cold_start = True
    #Loss and optimizer
    final_lr = 2e-3
    def add_module_params_with_decay(module, weight_decay, param_groups):#adds parameters with decay unless they are bias parameters, which shouldn't receive decay
        group_with_decay = []
        group_without_decay = []
        for name, param in module.named_parameters():
            if not param.requires_grad: continue
            if name == 'bias' or name.endswith('bias'):
                group_without_decay.append(param)
            else:
                group_with_decay.append(param)
        param_groups.append({"params": group_with_decay, "weight_decay": weight_decay})
        param_groups.append({"params": group_without_decay})
    optimizer_param_groups = [
    ]
    add_module_params_with_decay(model.conv, 2e-1, optimizer_param_groups)
    add_module_params_with_decay(model.post_cnn_linear, 2e-1, optimizer_param_groups)
    add_module_params_with_decay(model.g, 2e-1, optimizer_param_groups)
    add_module_params_with_decay(model.h, 2e-1, optimizer_param_groups)
    add_module_params_with_decay(model.f, 2e-1, optimizer_param_groups)
    add_module_params_with_decay(model.f_final, 2e-1, optimizer_param_groups)
    optimizer = Lamb(optimizer_param_groups, lr=final_lr)
    # Keep a handle to the raw model: amp/DDP wrap it, but checkpoints are
    # saved from the unwrapped module.
    base_model = model
    if use_mixed_precision:
        model, optimizer = amp.initialize(model, optimizer, opt_level="O1") #Mixed Precision
    lossFunc = torch.nn.CrossEntropyLoss()
    softmax = torch.nn.Softmax(dim=1)
    #Parallel distributed model
    device = device_ids[0]
    torch.cuda.set_device(device)
    parallel_model = torch.nn.parallel.DistributedDataParallel(model, device_ids)
    if rank == 0:
        #accuracy logging
        # TensorFlow 1.x session only used to emit TensorBoard summaries.
        sess = tf.Session()
        train_acc_placeholder = tf.placeholder(tf.float32, shape=())
        train_acc_summary = tf.summary.scalar('training_acc', train_acc_placeholder)
        val_acc_placeholder = tf.placeholder(tf.float32, shape=())
        val_acc_summary = tf.summary.scalar('validation_acc', val_acc_placeholder)
        writer = tf.summary.FileWriter("log", sess.graph)
    #training loop
    acc = []
    global_step = 0
    for epoch in range(epochs):
        # Reshuffle the distributed sampler deterministically per epoch.
        train_sampler.set_epoch(epoch)
        # Validation
        val_acc = []
        parallel_model.eval()
        with torch.no_grad():
            for i, (local_batch, local_labels) in enumerate(val_dataloader):
                local_batch, targets = local_batch.to(device), local_labels.to(device)
                #answer = model(local_batch.type(torch.float32))
                answer, _ = parallel_model(local_batch.type(torch.float32))
                #Calc accuracy
                answerSoftmax = softmax(answer)
                maxIndex = answerSoftmax.argmax(dim=1)
                correct = maxIndex.eq(targets)
                accuracy = correct.type(dtype=torch.float16).mean(dim=0)
                val_acc.append(accuracy)
                if i % 50 == 0 and rank == 0:
                    print("batch " + str(i))
            total_val_acc = sum(val_acc) / len(val_acc)
            print('Validation accuracy: ' + str(total_val_acc.item()))
            if rank == 0:
                summary = sess.run(val_acc_summary, feed_dict={val_acc_placeholder: total_val_acc.item()})
                writer.add_summary(summary, global_step=global_step)
        # Training
        parallel_model.train()
        for i, (local_batch, local_labels) in enumerate(train_dataloader):
            global_step = global_step + 1
            if cold_start and epoch < warmup_epochs:#linear scaling of the lr for warmup during the first few epochs
                lr = final_lr * global_step / (warmup_epochs*len(train_dataset) / (batch_size * world_size))
                for param_group in optimizer.param_groups:
                    param_group['lr'] = lr
            local_batch, targets = local_batch.to(device_ids[0]), local_labels.to(device_ids[0])
            optimizer.zero_grad()
            answer, activation_loss = parallel_model(local_batch.type(torch.float32))
            # Cross entropy plus a small activation-magnitude penalty.
            loss = lossFunc(answer, targets) + activation_loss * 2e-3
            #Calc accuracy
            answerSoftmax = softmax(answer)
            maxIndex = answerSoftmax.argmax(dim=1)
            correct = maxIndex.eq(targets)
            accuracy = correct.type(dtype=torch.float16).mean(dim=0)
            acc.append(accuracy)
            #Training step
            if use_mixed_precision:
                with amp.scale_loss(loss, optimizer) as scaled_loss: #Mixed precision
                    scaled_loss.backward()
            else:
                loss.backward()
            grad_norm = torch.nn.utils.clip_grad_norm_(parallel_model.parameters(), 1e1)
            optimizer.step()
            if i % 50 == 0 and rank == 0:
                print("epoch " + str(epoch) + " batch " + str(i))
                print("loss", loss)
                print("activation loss", activation_loss)
                print(grad_norm)
            #logging and saving weights
            if i % 1000 == 999:
                trainAcc = sum(acc) / len(acc)
                acc = []
                print('Training accuracy: ' + str(trainAcc.item()))
                if rank == 0:
                    if weights_filename is not None:
                        torch.save(base_model.state_dict(), weights_filename)
                        print('Weights saved')
                    summary = sess.run(train_acc_summary, feed_dict={train_acc_placeholder: trainAcc.item()})
                    writer.add_summary(summary, global_step=global_step)
        # Periodic numbered checkpoints on fresh runs only.
        if cold_start and weights_filename is not None and epoch % 10 == 0 and rank == 0:
            torch.save(base_model.state_dict(), weights_filename + "_cp" + str(epoch))
            print('Checkpoint saved')
    cleanup()
def setup(rank, world_size):
    """Join the NCCL process group and seed RNG identically in every worker."""
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '12355'
    # initialize the process group
    torch.distributed.init_process_group("nccl", rank=rank, world_size=world_size)
    # Explicitly setting seed to make sure that models created in two processes
    # start from same random weights and biases.
    torch.manual_seed(42)
def cleanup():
    """Tear down the distributed process group at the end of a worker."""
    torch.distributed.destroy_process_group()
def run(world_size):
    """Spawn one training worker process per GPU and block until all finish."""
    torch.multiprocessing.spawn(worker_fn, args=(world_size,), nprocs=world_size, join=True)
if __name__ == "__main__":
    run(4)#4 GPUs
| 39.296104 | 213 | 0.625289 | 6,548 | 0.432811 | 0 | 0 | 0 | 0 | 0 | 0 | 1,633 | 0.107938 |
921ca360ae33219bfdbd1ac2f46fb2050a3108c9 | 45,163 | py | Python | python/3D-rrt/pvtrace/Geometry.py | siddhu95/mcclanahoochie | 6df72553ba954b52e949a6847a213b22f9e90157 | [
"Apache-2.0"
] | 1 | 2020-12-27T21:37:35.000Z | 2020-12-27T21:37:35.000Z | python/3D-rrt/pvtrace/Geometry.py | siddhu95/mcclanahoochie | 6df72553ba954b52e949a6847a213b22f9e90157 | [
"Apache-2.0"
] | null | null | null | python/3D-rrt/pvtrace/Geometry.py | siddhu95/mcclanahoochie | 6df72553ba954b52e949a6847a213b22f9e90157 | [
"Apache-2.0"
] | null | null | null | # pvtrace is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# pvtrace is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import numpy as np
import scipy as sp
import scipy.linalg
from external.transformations import translation_matrix, rotation_matrix
import external.transformations as tf
from external.quickhull import qhull3d
import logging
import pdb
#logging.basicConfig(filename="/tmp/geom-debug.txt", level=logging.DEBUG, filemode="w")
def cmp_floats(a,b):
    """True when a and b agree to within an absolute tolerance of 1e-12."""
    return abs(a - b) < 1e-12
def cmp_floats_range(a,b):
    """Tolerant three-way compare: 0 if equal within tolerance, -1 if a<b, +1 otherwise."""
    if cmp_floats(a, b):
        return 0
    return -1 if a < b else 1
def intervalcheck(a,b,c):
    """
    Returns whether a <= b <= c is True or False
    """
    # Tolerant equality at either boundary counts as inside.
    if cmp_floats(a, b) or cmp_floats(b, c):
        return True
    return a < b < c
def intervalcheckstrict(a,b,c):
    """
    Returns whether a < b < c is True or False
    """
    return a < b < c
def smallerequalto(a,b):
    """
    Returns whether a<=b is True or False
    """
    # Tolerant equality first, then strict ordering.
    if cmp_floats(a, b):
        return True
    return a < b
def round_zero_elements(point):
    """Snap components within tolerance of zero to exactly 0.0.

    NOTE: mutates `point` in place and also returns it.
    """
    for idx in range(len(point)):
        if cmp_floats(0.0, point[idx]):
            point[idx] = 0.0
    return point
def cmp_points(a,b):
    """Element-wise tolerant equality of two points; False if either is None."""
    if a is None or b is None:
        return False
    for x, y in zip(list(a), list(b)):
        if not cmp_floats(x, y):
            return False
    return True
def flatten(l, ltypes=(list, tuple)):
    """Recursively flatten nested lists/tuples, dropping empty containers.

    The returned container has the same type as the top-level input.
    """
    out = []
    for item in l:
        if isinstance(item, ltypes):
            out.extend(flatten(list(item), ltypes))
        else:
            out.append(item)
    return type(l)(out)
def separation(beginning, end):
    """Euclidean distance from `beginning` to `end`."""
    displacement = np.array(end) - np.array(beginning)
    return magnitude(displacement)
def magnitude(vector):
    """Length (L2 norm) of a vector."""
    v = np.array(vector)
    return np.sqrt(np.dot(v, v))
def norm(vector):
    """Unit vector in the direction of `vector`."""
    v = np.array(vector)
    return v / magnitude(v)
def angle(normal, vector):
    """Angle in radians between a unit-length `normal` and `vector`."""
    assert cmp_floats(magnitude(normal), 1.0), "The normal vector is not normalised."
    cos_theta = np.dot(normal, vector) / magnitude(vector)
    return np.arccos(cos_theta)
def reflect_vector(normal, vector):
    """Mirror `vector` in the plane whose unit normal is `normal`."""
    projection = np.dot(normal, vector)
    return vector - 2 * projection * normal
def closest_point(reference, point_list):
    """Return the closest point in the list of reference points."""
    distances = [separation(reference, candidate) for candidate in point_list]
    # argmin picks the first minimum, matching the original argsort choice.
    return point_list[int(np.argmin(distances))]
def transform_point(point, transform):
    """Apply a 4x4 homogeneous `transform` to a 3D `point`."""
    homogeneous = np.matrix(np.concatenate((point, [1.]))).transpose()
    moved = np.dot(transform, homogeneous).transpose()
    return np.array(moved[0, 0:3]).squeeze()
def transform_direction(direction, transform):
    """Apply only the rotation part of a 4x4 `transform` to a direction."""
    theta, axis, _point = tf.rotation_from_matrix(transform)
    rotation = tf.rotation_matrix(theta, axis)
    homogeneous = np.matrix(np.concatenate((direction, [1.]))).transpose()
    rotated = np.dot(rotation, homogeneous).transpose()
    return np.array(rotated[0, 0:3]).squeeze()
def rotation_matrix_from_vector_alignment(before, after):
    """Return a 4x4 rotation matrix rotating direction `before` onto `after`.

    Both arguments should be normalised direction vectors (they are
    compared through their dot product).

    >>> # General input/output test
    >>> V1 = norm(np.random.random(3))
    >>> V2 = norm([1,1,1])
    >>> R = rotation_matrix_from_vector_alignment(V1, V2)
    >>> V3 = transform_direction(V1, R)
    >>> cmp_points(V2, V3)
    True

    >>> # Catch the special case in which we cannot take the cross product
    >>> V1 = [0,0,1]
    >>> V2 = [0,0,-1]
    >>> R = rotation_matrix_from_vector_alignment(V1, V2)
    >>> V3 = transform_direction(V1, R)
    >>> cmp_points(V2, V3)
    True
    """
    thedot = np.dot(before, after)
    if cmp_floats(thedot, 1.) == True:
        # Vectors are parallel -- no rotation needed.
        return tf.identity_matrix()
    if cmp_floats(thedot, -1.) == True:
        # Vectors are anti-parallel: the cross product vanishes, so pick
        # an arbitrary axis perpendicular to `before` and rotate by pi.
        # (Previously this fell through to a zero-length rotation axis.)
        before = np.array(before)
        helper = np.array([1., 0., 0.])
        if cmp_floats(abs(np.dot(before, helper)), 1.):
            # `before` lies along x; use y as the helper instead.
            helper = np.array([0., 1., 0.])
        axis = norm(np.cross(before, helper))
        return rotation_matrix(np.pi, axis)
    axis = np.cross(before, after)  # get the axis of rotation
    angle = np.arccos(np.dot(before, after))  # get the rotation angle
    return rotation_matrix(angle, axis)
class Ray(object):
    """A ray in the global cartesian frame.

    Stored as an origin (`position`) and a unit-length `direction`, both
    exposed through properties.
    """
    def __init__(self, position=[0.,0.,0.], direction=[0.,0.,1.]):
        self.__position = np.array(position)
        d = np.array(direction)
        self.__direction = d / np.sqrt(np.dot(direction, d.conj()))
    def getPosition(self):
        return self.__position
    def setPosition(self, position):
        # Snap near-zero components to exactly zero before storing.
        self.__position = round_zero_elements(position)
    def getDirection(self):
        return self.__direction
    def setDirection(self, direction):
        # Always store a normalised direction.
        d = np.array(direction)
        self.__direction = d / np.sqrt(np.dot(direction, d.conj()))
    def stepForward(self, distance):
        # Advance the origin along the direction of travel.
        self.position = self.position + distance * self.direction
    def behind(self, point):
        """True if `point` lies behind the ray (angle to direction > 90deg)."""
        to_point = np.array(point) - np.array(self.position)
        if cmp_points([0, 0, 0], to_point):
            # The ray starts exactly at the point.
            return False
        if angle(self.direction, to_point) > np.pi * .5:
            return True
        return False
    # Define properties
    direction = property(getDirection, setDirection)
    position = property(getPosition, setPosition)
class Intersection(object):
    """Defines the intersection between a ray and a geometrical object."""
    def __init__(self, ray, point, receiver):
        """An intersection is the point where a ray and a receiver meet;
        this class simply wraps those details. Intersections sort by
        `separation` (distance from ray.position to the hit point)."""
        super(Intersection, self).__init__()
        self.ray = ray
        self.point = point
        self.receiver = receiver
        self.separation = separation(point, ray.position)
    def __str__(self):
        return str(' point ' + str(self.point) + ' receiver ' + str(self.receiver))
    def __cmp__(self, other):
        # Python 2 ordering hook: compare by distance along the ray.
        return cmp(self.separation, other.separation)
    def __lt__(self, other):
        # Python 3 ignores __cmp__ and cmp() no longer exists; provide
        # __lt__ so sorted()/min() keep working by separation.
        return self.separation < other.separation
class Plane(object):
    """An infinite plane through the origin with normal along +z. A 4x4
    transformation matrix can be applied to generate other planes."""
    def __init__(self, transform=None):
        '''Transform is a 4x4 transformation matrix that rotates and translates the plane into the global frame (a plane in the xy plane point with normal along (+ve) z).'''
        super(Plane, self).__init__()
        self.transform = transform
        if self.transform is None:
            # 'is None' rather than '== None': an ndarray compared with
            # == yields an elementwise array, not a boolean.
            self.transform = tf.identity_matrix()
    def append_transform(self, new_transform):
        self.transform = np.dot(self.transform, new_transform)
    def contains(self, point):
        # A plane has no volume, so it never contains a point.
        return False
    def on_surface(self, point):
        """Returns True if the point is on the plane's surface and False otherwise."""
        # Fix: this previously referenced an undefined name `ray` and
        # compared a whole array with cmp_floats.
        inv_transform = tf.inverse_matrix(self.transform)
        local_point = transform_point(point, inv_transform)
        # In the local frame the plane is the surface z == 0.
        if cmp_floats(local_point[2], 0.):
            return True
        return False
    def surface_identifier(self, surface_point, assert_on_surface = True):
        # An infinite plane has a single surface; return its identifier
        # (previously this raised a string, a TypeError on modern Python).
        return 'planarsurf'
    def surface_normal(self, ray, acute=True):
        """Normal of the plane; flipped to make an acute angle with the ray
        direction unless acute=False."""
        normal = transform_direction((0,0,1), self.transform)
        if acute:
            # Fix: previously compared against an undefined name `rdir`.
            if angle(normal, ray.direction) > np.pi/2:
                normal = normal * -1.0
        return normal
    def intersection(self, ray):
        """
        Returns the intersection point of the ray with the plane. If no intersection occurs None is returned.
        >>> ray = Ray(position=[0.5, 0.5, -0.5], direction=[0,0,1])
        >>> plane = Plane()
        >>> plane.intersection(ray)
        [array([ 0.5,  0.5,  0. ])]

        >>> ray = Ray(position=[0.5, 0.5, -0.5], direction=[0,0,1])
        >>> plane = Plane()
        >>> plane.transform = tf.translation_matrix([0,0,1])
        >>> plane.intersection(ray)
        [array([ 0.5,  0.5,  1. ])]

        >>> ray = Ray(position=[0.5, 0.5, -0.5], direction=[0,0,1])
        >>> plane = Plane()
        >>> plane.append_transform(tf.translation_matrix([0,0,1]))
        >>> plane.append_transform(tf.rotation_matrix(np.pi,[1,0,0]))
        >>> plane.intersection(ray)
        [array([ 0.5,  0.5,  1. ])]
        """
        # We need apply the anti-transform of the plane to the ray. This gets the ray in the local frame of the plane.
        inv_transform = tf.inverse_matrix(self.transform)
        rpos = transform_point(ray.position, inv_transform)
        rdir = transform_direction(ray.direction, inv_transform)
        # Ray is in parallel to the plane -- there is no intersection
        if rdir[2] == 0.0:
            return None
        t = -rpos[2]/rdir[2]
        # Intersection point is behind the ray
        if t < 0.0:
            return None
        # Convert local frame to world frame
        point = rpos + t*rdir
        return [transform_point(point, self.transform)]
class FinitePlane(Plane):
    """A subclass of Plane but that has a finite size. The size of the plane
    is specified as if the plane were sitting in the xy-plane of a Cartesian
    system. The transformations are used for the positioning.

    >>> fp = FinitePlane(length=1, width=1)
    >>> fp.intersection(Ray(position=(0,0,1), direction=(0,0,-1)))
    [array([ 0.,  0.,  0.])]

    >>> fp = FinitePlane(length=1, width=1)
    >>> fp.intersection(Ray(position=(0,0,1), direction=(.5,.25,-1)))
    [array([ 0.5 ,  0.25,  0.  ])]

    >>> fp = FinitePlane(length=1, width=1)
    >>> fp.append_transform(translation_matrix((2,0,0)))
    >>> fp.intersection(Ray(position=(0,0,1), direction=(0,0,-1)))
    """
    def __init__(self, length=1, width=1):
        super(FinitePlane, self).__init__()
        self.length = length
        self.width = width
    def append_transform(self, new_transform):
        super(FinitePlane, self).append_transform(new_transform)
    def on_surface(self, point):
        """Returns True if the point is on the plane's surface and False otherwise."""
        # Fix: this previously referenced an undefined name `ray` and
        # compared a whole array with cmp_floats.
        inv_transform = tf.inverse_matrix(self.transform)
        local_point = transform_point(point, inv_transform)
        return (cmp_floats(local_point[2], 0.)
                and (0. < local_point[0] <= self.length)
                and (0. < local_point[1] <= self.width))
    def intersection(self, ray):
        """Returns a intersection point with a ray and the finite plane."""
        points = super(FinitePlane, self).intersection(ray)
        if points is None:
            # Ray misses (or is behind) the infinite plane entirely.
            return None
        # Map the world-frame hit into the plane's local frame for the
        # bounds test; this needs the inverse transform, not the forward one.
        local_point = transform_point(points[0], tf.inverse_matrix(self.transform))
        if (0. <= local_point[0] <= self.length) and (0. <= local_point[1] <= self.width):
            return points
        return None
class Polygon(object):
    """
    A (2D) polygon with n (>2) points.
    Only convex polygons are allowed! Order of points is of course important!
    """
    def __init__(self, points):
        """Store the vertex list; asserts that all points are coplanar."""
        super(Polygon, self).__init__()
        self.pts = points
        #check if points are in one plane
        assert len(self.pts) >= 3, "You need at least 3 points to build a Polygon"
        if len(self.pts) > 3:
            x_0 = np.array(self.pts[0])
            for i in range(1,len(self.pts)-2):
                #the determinant of the vectors (volume) must always be 0
                x_i = np.array(self.pts[i])
                x_i1 = np.array(self.pts[i+1])
                x_i2 = np.array(self.pts[i+2])
                det = np.linalg.det([x_0-x_i, x_0-x_i1, x_0-x_i2])
                assert cmp_floats( det, 0.0 ), "Points must be in a plane to create a Polygon"
    def on_surface(self, point):
        """Returns True if the point is on the polygon's surface and false otherwise."""
        # Angle-sum test: for an interior point of a convex polygon, the
        # angles subtended by consecutive vertex pairs sum to 2*pi.
        n = len(self.pts)
        anglesum = 0
        p = np.array(point)
        for i in range(n):
            v1 = np.array(self.pts[i]) - p
            v2 = np.array(self.pts[(i+1)%n]) - p
            m1 = magnitude(v1)
            m2 = magnitude(v2)
            if cmp_floats( m1*m2 , 0. ):
                return True #point is one of the nodes
            else:
                # Accumulate the angle between the two vertex vectors.
                costheta = np.dot(v1,v2)/(m1*m2)
                anglesum = anglesum + np.arccos(costheta)
        return cmp_floats( anglesum , 2*np.pi )
    def contains(self, point):
        # A polygon has no volume, so it never contains a point.
        return False
    def surface_identifier(self, surface_point, assert_on_surface = True):
        # A polygon has a single surface.
        return "polygon"
    def surface_normal(self, ray, acute=False):
        # Normal from the first three vertices; NOTE(review): `ray` and
        # `acute` are ignored, so the orientation of the returned normal
        # depends only on vertex order -- confirm callers expect this.
        vec1 = np.array(self.pts[0])-np.array(self.pts[1])
        vec2 = np.array(self.pts[0])-np.array(self.pts[2])
        normal = norm( np.cross(vec1,vec2) )
        return normal
    def intersection(self, ray):
        """Returns a intersection point with a ray and the polygon."""
        n = self.surface_normal(ray)
        #Ray is parallel to the polygon
        if cmp_floats( np.dot( np.array(ray.direction), n ), 0. ):
            return None
        # Distance along the ray to the supporting plane of the polygon.
        t = 1/(np.dot(np.array(ray.direction),n)) * ( np.dot(n,np.array(self.pts[0])) - np.dot(n,np.array(ray.position)) )
        #Intersection point is behind the ray
        if t < 0.0:
            return None
        #Calculate intersection point
        point = np.array(ray.position) + t*np.array(ray.direction)
        #Check if intersection point is really in the polygon or only on the (infinite) plane
        if self.on_surface(point):
            return [list(point)]
        return None
class Box(object):
"""An axis aligned box defined by an minimum and extend points (array/list like values)."""
def __init__(self, origin=(0,0,0), extent=(1,1,1)):
super(Box, self).__init__()
self.origin = np.array(origin)
self.extent = np.array(extent)
self.points = [origin, extent]
self.transform = tf.identity_matrix()
def append_transform(self, new_transform):
self.transform = tf.concatenate_matrices(new_transform, self.transform)
def contains(self, point):
"""Returns True is the point is inside the box or False if it is not or is on the surface.
>>> box = Box([1,1,1], [2,2,2])
>>> box.contains([2,2,2])
False
>>> box = Box([1,1,1], [2,2,2])
>>> box.contains([3,3,3])
False
>>> # This point is not the surface within rounding errors
>>> box = Box([0,0,0], [1,1,1])
>>> box.contains([ 0.04223342, 0.99999999999999989 , 0.35692177])
False
>>> box = Box([0,0,0], [1,1,1])
>>> box.contains([ 0.04223342, 0.5 , 0.35692177])
True
"""
#local_point = transform_point(point, tf.inverse_matrix(self.transform))
#for pair in zip((self.origin, local_point, self.extent)):
# if not pair[0] < pair[1] < pair[2]:
# return False
#return True
local_point = transform_point(point, tf.inverse_matrix(self.transform))
for i in range(0,3):
#if not (self.origin[i] < local_point[i] < self.extent[i]):
# Want to make this comparison: self.origin[i] < local_point[i] < self.extent[i]
c1 = cmp_floats_range(self.origin[i], local_point[i])
#print self.origin[i], " is less than ", local_point[i]
if c1 == -1:
b1 = True
else:
b1 = False
#print b1
c2 = cmp_floats_range(local_point[i], self.extent[i])
#print local_point[i], " is less than ", self.extent[i]
if c2 == -1:
b2 = True
else:
b2 = False
#print b2
if not (b1 and b2):
return False
return True
""" # Alternatively:
local_point = transform_point(point, tf.inverse_matrix(self.transform))
def_points = np.concatenate((np.array(self.origin), np.array(self.extent)))
containbool = True
for i in range(0,3):
if intervalcheckstrict(def_points[i],local_point[i],def_points[i+3]) == False:
containbool = False
return containbool
"""
def surface_identifier(self, surface_point, assert_on_surface = True):
"""
Returns an unique identifier that specifies the surface which holds the surface_points.
self.on_surface(surface_point) must return True, otherwise an assert error is thrown.
Example, for a Box with origin=(X,Y,Z), and size=(L,W,H) has the identifiers:
"left":(X,y,z)
"right":(X+L,y,z)
"near":(x,Y,z)
"far":(x,Y+W,z)
"bottom":(x,y,H)
"top":(x,y,Z+H)
"""
# Get an axis-aligned point... then this is really easy.
local_point = transform_point(surface_point, tf.inverse_matrix(self.transform))
# the local point must have at least one common point with the surface definition points
def_points = np.concatenate((np.array(self.origin), np.array(self.extent)))
#surface_id[0]=0 => left
#surface_id[1]=0 => near
#surface_id[2]=0 => bottom
#surface_id[3]=0 => right
#surface_id[4]=0 => far
#surface_id[5]=0 => top
#import pdb; pdb.set_trace()
surface_id_array = [0,0,0,0,0,0]
boolarray = [False, False, False]
for i in range(0,3):
if cmp_floats(def_points[i], local_point[i]):
for j in range(0,3):
if intervalcheck(def_points[j],local_point[j],def_points[j+3]):
surface_id_array[i] = 1
boolarray[j] = True
if cmp_floats(def_points[i+3], local_point[i]):
for j in range(0,3):
if intervalcheck(def_points[j],local_point[j],def_points[j+3]):
surface_id_array[i+3] = 1
boolarray[j] = True
if assert_on_surface == True:
assert boolarray[0] == boolarray[1] == boolarray[2] == True
surface_name = []
if surface_id_array[0] == 1:
surface_name.append('left')
if surface_id_array[1] == 1:
surface_name.append('near')
if surface_id_array[2] == 1:
surface_name.append('bottom')
if surface_id_array[3] == 1:
surface_name.append('right')
if surface_id_array[4] == 1:
surface_name.append('far')
if surface_id_array[5] == 1:
surface_name.append('top')
"""
The following helps to specify if the local_point is located on a corner
or edge of the box. If that is not desired, simply return surface_name[0].
"""
# return surface_name[0]
return_id = ''
for j in range(len(surface_name)):
return_id = return_id + surface_name[j] + ''
return return_id
    def on_surface(self, point):
        """Returns True if the point is on the surface False otherwise.
        >>> box = Box([1,1,1], [2,2,2])
        >>> box.on_surface([2,2,2])
        True
        >>> box = Box([1,1,1], [2,2,2])
        >>> box.on_surface([4,4,4])
        False
        >>> box = Box(origin=(0, 0, 1.1000000000000001), extent=np.array([ 1. , 1. , 2.1]))
        >>> ray = Ray(position=(.5,.5, 2.1), direction=(0,0,1))
        >>> box.on_surface(ray.position)
        True
        """
        # A point strictly inside the box is, by definition, not on its surface.
        if self.contains(point) == True:
            return False
        # Get an axis-aligned point... then this is really easy.
        local_point = transform_point(point, tf.inverse_matrix(self.transform))
        # the local point must have at least one common point with the surface definition points
        # def_points[0:3] are the minimum-corner (origin) coordinates and
        # def_points[3:6] the maximum-corner (extent) coordinates.
        def_points = np.concatenate((np.array(self.origin), np.array(self.extent)))
        bool1 = False
        bool2 = False
        bool3 = False
        boolarray = [bool1, bool2, bool3]
        # Whenever coordinate i coincides with a min- or max-face plane, mark every
        # coordinate j that lies within the box's [min, max] slab (intervalcheck is
        # presumably an inclusive interval test -- it is defined elsewhere in this file).
        for i in range(0,3):
            if cmp_floats(def_points[i], local_point[i]):
                for j in range(0,3):
                    if intervalcheck(def_points[j],local_point[j],def_points[j+3]):
                        boolarray[j] = True
            if cmp_floats(def_points[i+3], local_point[i]):
                for j in range(0,3):
                    if intervalcheck(def_points[j],local_point[j],def_points[j+3]):
                        boolarray[j] = True
        # On the surface only if some face plane matched and all three coordinates
        # were found to lie within the box bounds.
        if boolarray[0] == boolarray[1] == boolarray[2] == True:
            return True
        return False
    def surface_normal(self, ray, acute=True):
        """
        Returns the normalised vector of which is the acute surface normal (0<~ theta <~ 90)
        with respect to ray direction. If acute=False is specified the reflex
        normal is returned (0<~ theta < 360) The ray must be on the surface
        otherwise an error is raised.
        >>> box = Box([0,0,0], [1,1,1])
        >>> ray = Ray([0.5,0.5,1], [0,0,1])
        >>> box.surface_normal(ray)
        array([ 0., 0., 1.])
        >>> box = Box([1,1,1], [2,2,2])
        >>> ray = Ray([1.5,1.5,2], [0,0,1])
        >>> box.surface_normal(ray)
        array([ 0., 0., 1.])
        """
        #pdb.set_trace()
        assert self.on_surface(ray.position), "The point is not on the surface of the box."
        # Work in the box's local (axis-aligned) frame.
        invtrans = tf.inverse_matrix(self.transform)
        rpos = transform_point(ray.position, invtrans)
        rdir = transform_direction(ray.direction, invtrans)
        # To define a flat surface, 3 points are needed.
        # Find which coordinate of the point matches a coordinate of the
        # minimum corner (origin); that coordinate's axis is the face normal.
        # NOTE(review): the flag below shadows the builtin `exit`.
        common_index = None
        exit = False
        reference_point = list(self.origin)
        for ref in reference_point:
            if not exit:
                for val in rpos:
                    #logging.debug(str((ref,val)))
                    if cmp_floats(ref,val):
                        #logging.debug("Common value found, " + str(val) + " at index" + str(list(rpos).index(val)))
                        common_index = list(rpos).index(val)
                        exit = True
                        break
        exit = False
        # No match against the origin faces: repeat against the maximum corner (extent).
        if common_index == None:
            reference_point = list(self.extent)
            for ref in reference_point:
                if not exit:
                    for val in rpos:
                        #logging.debug(str((ref,val)))
                        if cmp_floats(ref,val):
                            #logging.debug("Common value found, " + str(val) + " at index" + str(list(rpos).index(val)))
                            common_index = list(rpos).index(val)
                            exit = True
                            break
        assert common_index != None, "The intersection point %s doesn't share an element with either the origin %s or extent points %s (all points transformed into local frame)." % (rpos, self.origin, self.extent)
        # Origin-side faces point along the negative axis, extent-side faces positive.
        normal = np.zeros(3)
        if list(self.origin) == list(reference_point):
            normal[common_index] = -1.
        else:
            normal[common_index] = 1.
        # Flip so the normal is within 90 degrees of the ray direction when requested.
        if acute:
            if angle(normal, rdir) > np.pi/2:
                normal = normal * -1.0
            assert 0 <= angle(normal, rdir) <= np.pi/2, "The normal vector needs to be pointing in the same direction quadrant as the ray, so the angle between them is between 0 and 90"
        # remove signed zeros this just makes the doctest work. Signed zeros shouldn't really effect the maths but makes things neat.
        # (-0.0 == 0.0 is True, so assigning 0.0 replaces any negative zero.)
        for i in range(0,3):
            if normal[i] == 0.0:
                normal[i] = 0.0
        return transform_direction(normal, self.transform)
    def intersection(self, ray):
        '''Returns an array intersection points with the ray and box. If no intersection occurs
        this function returns None.
        # Inside-out single intersection
        >>> ray = Ray(position=[0.5,0.5,0.5], direction=[0,0,1])
        >>> box = Box()
        >>> box.intersection(ray)
        [array([ 0.5, 0.5, 1. ])]
        # Inside-out single intersection with translation
        >>> ray = Ray(position=[0.5,0.5,0.5], direction=[0,0,1])
        >>> box = Box()
        >>> box.transform = tf.translation_matrix([0,0,1])
        >>> box.intersection(ray)
        [array([ 0.5, 0.5, 1. ]), array([ 0.5, 0.5, 2. ])]
        >>> ray = Ray(position=[0.5,0.5,0.5], direction=[0,0,1])
        >>> box = Box()
        >>> box.append_transform(tf.rotation_matrix(2*np.pi, [0,0,1]))
        >>> box.intersection(ray)
        [array([ 0.5, 0.5, 1. ])]
        >>> ray = Ray(position=[0.5,0.5,0.5], direction=[0,0,1])
        >>> box = Box()
        >>> box.append_transform(tf.rotation_matrix(2*np.pi, norm([1,1,0])))
        >>> box.append_transform(tf.translation_matrix([0,0,1]))
        >>> box.intersection(ray)
        [array([ 0.5, 0.5, 1. ]), array([ 0.5, 0.5, 2. ])]
        Here I am using the the work of Amy Williams, Steve Barrus, R. Keith Morley, and
        Peter Shirley, "An Efficient and Robust Ray-Box Intersection Algorithm" Journal of
        graphics tools, 10(1):49-54, 2005'''
        # Transform the ray into the box's local, axis-aligned frame.
        invtrans = tf.inverse_matrix(self.transform)
        rpos = transform_point(ray.position, invtrans)
        rdir = transform_direction(ray.direction, invtrans)
        #pts = [transform_point(self.points[0], self.transform), transform_point(self.points[1], self.transform)]
        pts = [np.array(self.points[0]), np.array(self.points[1])]
        # Reciprocal direction, per the slab method. NOTE(review): a zero
        # direction component relies on rdir being a numpy float (1.0/0.0 -> inf
        # with a warning); with plain Python floats this would raise
        # ZeroDivisionError -- confirm transform_direction returns an ndarray.
        rinvd = [1.0/rdir[0], 1.0/rdir[1], 1.0/rdir[2]]
        # rsgn[i] is True when direction component i is negative; the booleans
        # index pts (False -> 0 = min corner, True -> 1 = max corner) to pick
        # the near/far slab planes.
        rsgn = [1.0/rinvd[0] < 0.0, 1.0/rinvd[1] < 0.0, 1.0/rinvd[2] < 0.0]
        tmin = (pts[rsgn[0]][0] - rpos[0]) * rinvd[0]
        tmax = (pts[1-rsgn[0]][0] - rpos[0]) * rinvd[0]
        tymin = (pts[rsgn[1]][1] - rpos[1]) * rinvd[1]
        tymax = (pts[1-rsgn[1]][1] - rpos[1]) * rinvd[1]
        #Bug here: this is the exit point with bug1.py
        if (tmin > tymax) or (tymin > tmax):
            return None
        if tymin > tmin:
            tmin = tymin
        if tymax < tmax:
            tmax = tymax
        # Intersect the z slab with the running [tmin, tmax] interval.
        tzmin = (pts[rsgn[2]][2] - rpos[2]) * rinvd[2]
        tzmax = (pts[1-rsgn[2]][2] - rpos[2]) * rinvd[2]
        if (tmin > tzmax) or (tzmin > tmax):
            return None
        if tzmin > tmin:
            tmin = tzmin
        if tzmax < tmax:
            tmax = tzmax
        # Calculate the hit coordinates then if the solution is in the forward direction append to the hit list.
        hit_coordinates = []
        pt1 = rpos + tmin * rdir
        pt2 = rpos + tmax * rdir
        #pt1_sign = np.dot(pt1, rdir)
        #pt2_sign = np.dot(pt2, rdir)
        #print "tmin", tmin, "tmax", tmax
        if tmin >= 0.0:
            hit_coordinates.append(pt1)
        if tmax >= 0.0:
            hit_coordinates.append(pt2)
        #print hit_coordinates
        if len(hit_coordinates) == 0:
            return None
        # Convert hit coordinate back to the world frame
        hit_coords_world = []
        for point in hit_coordinates:
            hit_coords_world.append(transform_point(point, self.transform))
        return hit_coords_world
class Cylinder(object):
    """
    Parameterised standard representation of a cylinder. The axis is aligned along z but the radius
    and the length of the cylinder can be specified. A transformation must be applied to use
    it centered at a different location or angle.
    """
    def __init__(self, radius=1, length=1):
        super(Cylinder, self).__init__()
        # Local frame: base cap in the z=0 plane, end cap at z=length.
        self.radius = radius
        self.length = length
        self.transform = tf.identity_matrix()
    def append_transform(self, new_transform):
        # Left-multiply, so the new transform is applied after the existing one.
        self.transform = tf.concatenate_matrices(new_transform, self.transform)
    def contains(self, point):
        """
        Returns True if the point in inside the cylinder and False if it is on the surface or outside.
        >>> # Inside
        >>> cylinder = Cylinder(.5, 2)
        >>> cylinder.contains([.25, .25, 1])
        True
        >>> # On surface
        >>> cylinder.contains([.0, .0, 2.])
        False
        >>> # Outside
        >>> cylinder.contains([-1,-1,-1])
        False
        """
        if self.on_surface(point) == True:
            return False
        local_point = transform_point(point, tf.inverse_matrix(self.transform))
        origin_z = 0.
        # Radial distance from the cylinder axis in the local frame.
        xydistance = np.sqrt(local_point[0]**2 + local_point[1]**2)
        if intervalcheckstrict(origin_z, local_point[2], self.length) == True and xydistance<self.radius:
            return True
        else:
            return False
    def surface_normal(self, ray, acute=True):
        """
        Return the surface normal for a ray on the shape surface.
        An assert error is raised if the ray is not on the surface.
        >>> cylinder = Cylinder(2, 2)
        >>> #Bottom cap in
        >>> ray = Ray([0,0,0], [0,0,1])
        >>> cylinder.surface_normal(ray)
        array([ 0., 0., 1.])
        >>> #Bottom cap out
        >>> ray = Ray([0,0,0], [0,0,-1])
        >>> cylinder.surface_normal(ray)
        array([ 0., 0., -1.])
        >>> # End cap in
        >>> ray = Ray([0,0,2], [0,0,-1])
        >>> cylinder.surface_normal(ray)
        array([ 0., 0., -1.])
        >>> # End cap out
        >>> ray = Ray([0,0,2], [0,0,1])
        >>> cylinder.surface_normal(ray)
        array([ 0., 0., 1.])
        >>> # Radial
        >>> ray = Ray([2, 0, 1], [1,0,0])
        >>> cylinder.surface_normal(ray)
        array([ 1., 0., 0.])
        """
        assert self.on_surface(ray.position), "The ray is not on the surface."
        invtrans = tf.inverse_matrix(self.transform)
        rpos = transform_point(ray.position, invtrans)
        rdir = transform_direction(ray.direction, invtrans)
        # point on radius surface
        pt_radius = np.sqrt(rpos[0]**2 + rpos[1]**2)
        c0 = cmp_floats(pt_radius, self.radius)
        #point on end caps
        c1 = cmp_floats(rpos[2], .0)
        c2 = cmp_floats(rpos[2], self.length)
        # check radius first
        # On the curved hull and not on a cap plane (c1 == c2 can only mean
        # both False for a cylinder of non-zero length): radial normal.
        if c0 and (c1 == c2):
            normal = norm(np.array(rpos) - np.array([0,0,rpos[2]]))
        elif c1:
            normal = np.array([0,0,-1])
        else:
            # Create a vector that points from the axis of the cylinder to the ray position,
            # this is the normal vector.
            normal = np.array([0,0,1])
        if acute:
            if angle(normal, rdir) > np.pi*0.5:
                normal = normal * -1.
        return transform_direction(normal, self.transform)
    def on_surface(self, point):
        """
        >>> # On surface
        >>> cylinder = Cylinder(.5, 2.)
        >>> cylinder.on_surface([.0, .0, 2.])
        True
        """
        """ # !!! Old version !!!
        local_point = transform_point(point, tf.inverse_matrix(self.transform))
        # xy-component is equal to radius
        pt_radius = np.sqrt(local_point[0]**2 + local_point[1]**2)
        c0 = cmp_floats(pt_radius, self.radius)
        #z-component is equal to zero or length
        c1 = cmp_floats(local_point[2], .0)
        c2 = cmp_floats(local_point[2], self.length)
        if c1 or c2:
            return True
        elif c0:
            return True
        else:
            return False
        """
        local_point = transform_point(point, tf.inverse_matrix(self.transform))
        origin_z = 0.
        xydistance = np.sqrt(local_point[0]**2 + local_point[1]**2)
        # On the curved hull: z within [0, length] and radial distance == radius.
        if intervalcheck(origin_z, local_point[2], self.length) == True:
            if cmp_floats(xydistance, self.radius) == True:
                return True
        # On a cap: radial distance <= radius and z on a cap plane.
        if smallerequalto(xydistance,self.radius):
            if cmp_floats(local_point[2], origin_z) == True or cmp_floats(local_point[2], self.length) == True:
                return True
        return False
    def surface_identifier(self, surface_point, assert_on_surface = True):
        # Return 'hull', 'base' or 'cap' for a point on the cylinder surface.
        # NOTE(review): if the point is NOT on the surface and
        # assert_on_surface is False, `surfacename` is never assigned and the
        # final return raises UnboundLocalError -- confirm intended behaviour.
        local_point = transform_point(surface_point, tf.inverse_matrix(self.transform))
        origin_z = 0.
        xydistance = np.sqrt(local_point[0]**2 + local_point[1]**2)
        """
        Assert surface_point on surface
        """
        assertbool = False
        if intervalcheck(origin_z, local_point[2], self.length) == True:
            if cmp_floats(xydistance, self.radius) == True:
                surfacename = 'hull'
                assertbool = True
        if smallerequalto(xydistance,self.radius):
            if cmp_floats(local_point[2], origin_z) == True:
                surfacename = 'base'
                assertbool = True
            if cmp_floats(local_point[2], self.length) == True:
                surfacename = 'cap'
                assertbool = True
        if assert_on_surface == True:
            assert assertbool, "The assert bool is wrong."
        return surfacename
    def intersection(self, ray):
        """
        Returns all forward intersection points with ray and the capped cylinder.
        The intersection algoithm is taken from, "Intersecting a Ray with a Cylinder"
        Joseph M. Cychosz and Warren N. Waggenspack, Jr., in "Graphics Gems IV",
        Academic Press, 1994.
        >>> cld = Cylinder(1.0, 1.0)
        >>> cld.intersection(Ray([0.0,0.0,0.5], [1,0,0]))
        [array([ 1. , 0. , 0.5])]
        >>> cld.intersection(Ray([-5,0.0,0.5], [1,0,0]))
        [array([-1. , 0. , 0.5]), array([ 1. , 0. , 0.5])]
        >>> cld.intersection(Ray([.5,.5,-1], [0,0,1]))
        [array([ 0.5, 0.5, 1. ]), array([ 0.5, 0.5, 0. ])]
        >>> cld.intersection( Ray([0.0,0.0,2.0], [0,0,-1]))
        [array([ 0., 0., 1.]), array([ 0., 0., 0.])]
        >>> cld.intersection(Ray([-0.2, 1.2,0.5],[0.75498586, -0.53837322, 0.37436697]))
        [array([ 0.08561878, 0.99632797, 0.64162681]), array([ 0.80834999, 0.48095523, 1. ])]
        >>> cld.intersection(Ray(position=[ 0.65993112596983427575736414, -0.036309587083015459896273569, 1. ], direction=[ 0.24273873128664008591570678, -0.81399482405912471083553328, 0.52772183462341881732271531]))
        [array([ 0.65993113, -0.03630959, 1. ])]
        >>> cld.transform = tf.translation_matrix([0,0,1])
        >>> cld.intersection(Ray([-5,0.0,1.5], [1,0,0]))
        [array([-1. , 0. , 1.5]), array([ 1. , 0. , 1.5])]
        >>> cld.transform = tf.identity_matrix()
        >>> cld.transform = tf.rotation_matrix(0.25*np.pi, [1,0,0])
        >>> cld.intersection(Ray([-5,-.5,-0.25], [1,0,0]))
        [array([-0.84779125, -0.5 , -0.25 ]), array([ 0.84779125, -0.5 , -0.25 ])]
        """
        # Inverse transform the ray to get it into the cylinders local frame
        inv_transform = tf.inverse_matrix(self.transform)
        rpos = transform_point(ray.position, inv_transform)
        rdir = transform_direction(ray.direction, inv_transform)
        direction = np.array([0,0,1])
        # normal is perpendicular to both the ray and the cylinder axis; its
        # magnitude is zero when the ray is parallel to the axis.
        normal = np.cross(rdir, direction)
        normal_magnitude = magnitude(normal)
        #print normal_magnitude, "Normal magnitude"
        if cmp_floats(normal_magnitude, .0):
            # Ray parallel to cylinder direction
            normal = norm(normal)
            #d = abs(np.dot(rpos, direction))
            #D = rpos - d * np.array(direction)
            #if magnitude(D) <= self.radius:
            # Axis aligned ray inside the cylinder volume only hits caps
            #print "Inside axis aligned ray only hits caps"
            bottom = Plane()
            top = Plane()
            top.transform = tf.translation_matrix([0,0,self.length])
            p0 = top.intersection(Ray(rpos, rdir))
            p1 = bottom.intersection(Ray(rpos, rdir))
            cap_intersections = []
            if p0 != None:
                cap_intersections.append(p0)
            if p1 != None:
                cap_intersections.append(p1)
            points = []
            # Keep only cap-plane hits that fall within the cap radius.
            for point in cap_intersections:
                if point[0] != None:
                    point = point[0]
                    point_radius = np.sqrt(point[0]**2 + point[1]**2)
                    if point_radius <= self.radius:
                        #print "Hit cap at point:"
                        #print point
                        #print ""
                        points.append(point)
            if len(points) > 0:
                world_points = []
                for pt in points:
                    world_points.append(transform_point(pt, self.transform))
                #print "Local points", points
                #print "World points", world_points
                return world_points
            return None
        # finish axis parallel branch
        #print "Not parallel to cylinder axis."
        #print ""
        normal = norm(normal)
        # d is the shortest distance between the ray and the cylinder axis;
        # only when d <= radius can the infinite quadric surface be hit.
        d = abs(np.dot(rpos, normal))
        if d <= self.radius:
            #Hit quadratic surface
            O = np.cross(rpos, direction)
            t = - np.dot(O,normal) / normal_magnitude
            O = np.cross(normal, direction)
            O = norm(O)
            s = abs(np.sqrt(self.radius**2 - d**2) / np.dot(rdir, O))
            # Entry and exit parameters on the infinite cylinder.
            t0 = t - s
            p0 = rpos + t0 * rdir
            t1 = t + s
            p1 = rpos + t1 * rdir
            points = []
            # Keep forward hits that fall within the cylinder's z range.
            if (t0 >= 0.0) and (.0 <= p0[2] <= self.length):
                points.append(p0)
            if (t1 >= 0.0) and (.0 <= p1[2] <= self.length):
                points.append(p1)
            #print "Hits quadratic surface with t0 and t1, ", t0, t1
            #print ""
            #print "Intersection points:"
            #p0 = rpos + t0 * rdir
            #p1 = rpos + t1 * rdir
            # Check that hit quadratic surface in the length range
            #points = []
            #if (.0 <= p0[2] <= self.length) and not Ray(rpos, rdir).behind(p0):
            #    points.append(p0)
            #
            #if (.0 <= p1[2] <= self.length) and not Ray(rpos, rdir).behind(p1):
            #    points.append(p1)
            #print points
            #Now compute intersection with end caps
            #print "Now to calculate caps intersections"
            bottom = Plane()
            top = Plane()
            top.transform = tf.translation_matrix([0,0,self.length])
            p2 = top.intersection(Ray(rpos, rdir))
            p3 = bottom.intersection(Ray(rpos, rdir))
            cap_intersections = []
            if p2 != None:
                cap_intersections.append(p2)
            if p3 != None:
                cap_intersections.append(p3)
            for point in cap_intersections:
                if point[0] != None:
                    point = point[0]
                    point_radius = np.sqrt(point[0]**2 + point[1]**2)
                    if point_radius <= self.radius:
                        #print "Hit cap at point:"
                        #print point
                        #print ""
                        points.append(point)
            #print points
            if len(points) > 0:
                world_points = []
                for pt in points:
                    world_points.append(transform_point(pt, self.transform))
                return world_points
            return None
class Convex(object):
    """A convex hull shape built from an arbitrary cloud of 3D points.

    The hull is computed with qhull3d() and stored as a list of triangular
    Polygon faces; all geometric queries delegate to those faces.
    """
    def __init__(self, points):
        super(Convex, self).__init__()
        self.points = points
        verts, triangles = qhull3d(points)
        # Build the faces as a real list. (The original code assigned into a
        # range() object, which only works on Python 2 where range() is a list.)
        self.faces = [Polygon([verts[t[0]], verts[t[1]], verts[t[2]]])
                      for t in triangles]
    def on_surface(self, point):
        """Return True if the point lies on any face of the hull."""
        for face in self.faces:
            if face.on_surface(point):
                return True
        return False
    def surface_normal(self, ray, acute=False):
        """Return the normal of the face the ray position lies on.

        The normal is flipped so it is no more than 90 degrees from the ray
        direction. Raises AssertionError if the position is not on the hull.
        """
        for face in self.faces:
            if face.on_surface(ray.position):
                normal = face.surface_normal(ray, acute=acute)
                if angle(normal, ray.direction) > np.pi/2:
                    normal = normal * -1
                return normal
        # The original `assert("...")` was always true (a non-empty string),
        # so failures silently returned None; fail loudly instead.
        raise AssertionError("Have not found the surface normal for this ray. Are you sure the ray is on the surface of this object?")
    def surface_identifier(self, surface_point, assert_on_surface=True):
        """Every face of a convex hull shares the single identifier "Convex"."""
        return "Convex"
    def intersection(self, ray):
        """Return a list of ray/face intersection points, or None if none hit."""
        points = []
        for face in self.faces:
            pt = face.intersection(ray)
            if pt is not None:
                points.append(np.array(pt[0]))
        if len(points) > 0:
            return points
        return None
    def contains(self, point):
        """Return True if the point is strictly inside the hull.

        Casts a ray in a random direction and counts face crossings: an odd
        count means inside. Points lying on the surface return False.
        """
        ray = Ray(position=point, direction=norm(np.random.random(3)))
        hit_counter = 0
        for face in self.faces:
            if face.on_surface(ray.position):
                return False
            if face.intersection(ray) is not None:
                hit_counter = hit_counter + 1
        # An even number of crossings means the point is outside.
        if hit_counter % 2 == 0:
            return False
        return True
    def centroid(self):
        """Credit:
        http://orion.math.iastate.edu:80/burkardt/c_src/geometryc/geometryc.html
        Returns the 'centroid' of the Convex polynomial.
        """
        raise NotImplementedError("The centroid method of the Convex class is not yet implemented.")
if __name__ == "__main__":
    import doctest
    #doctest.testmod()
    # Disabled ad-hoc check of the anti-parallel vector alignment case.
    if False:
        # Catch the special case in which we cannot take the cross product
        V1 = [0,0,1]
        V2 = [0,0,-1]
        #import pdb; pdb.set_trace()
        R = rotation_matrix_from_vector_alignment(V1, V2)
        R2 = rotation_matrix(np.pi, [1,0,0])
        V3 = transform_direction(V1, R)
        # NOTE(review): Python 2 print statements -- this module predates Python 3.
        print R2
        print cmp_points(V2, V3)
| 36.807661 | 323 | 0.538516 | 39,500 | 0.87461 | 0 | 0 | 0 | 0 | 0 | 0 | 18,233 | 0.403715 |
921cf7335f41595219af07b2dffd5f74a9cfc15d | 40 | py | Python | satori/sysinfo/__init__.py | mgeisler/satori | dea382bae1cd043189589c0f7d4c20b4b6725ab5 | [
"Apache-2.0"
] | 1 | 2015-01-18T19:56:28.000Z | 2015-01-18T19:56:28.000Z | satori/sysinfo/__init__.py | samstav/satori | 239fa1e3c7aac78599145c670576f0ac76a41a89 | [
"Apache-2.0"
] | null | null | null | satori/sysinfo/__init__.py | samstav/satori | 239fa1e3c7aac78599145c670576f0ac76a41a89 | [
"Apache-2.0"
] | null | null | null | """Modules for Data Plane Discovery."""
| 20 | 39 | 0.7 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 39 | 0.975 |
921eab78015b0aaf2e09f13fd81a7eedf0354c09 | 1,333 | py | Python | build/python/tests/fast_copy_mock/fast_copy_mock.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | 5 | 2022-01-10T20:22:17.000Z | 2022-01-21T20:14:17.000Z | build/python/tests/fast_copy_mock/fast_copy_mock.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | null | null | null | build/python/tests/fast_copy_mock/fast_copy_mock.py | fabio-d/fuchsia-stardock | e57f5d1cf015fe2294fc2a5aea704842294318d2 | [
"BSD-2-Clause"
] | null | null | null | # Copyright 2022 The Fuchsia Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import functools
import os
from typing import Any, Callable, List, Tuple, Union
from assembly.common import FileEntry
__all__ = [
"create_fast_copy_mock_instance", "fast_copy_mock", "mock_fast_copy_in"
]
FilePath = Union[str, os.PathLike]
def fast_copy_mock(
        src: FilePath, dst: FilePath, tracked_copies: List[FileEntry]) -> None:
    """Stand-in for assembly.fast_copy() that records, rather than performs,
    each requested copy by appending a FileEntry to `tracked_copies`.
    """
    entry = FileEntry(source=src, destination=dst)
    tracked_copies.append(entry)
def create_fast_copy_mock_instance() -> Tuple[Callable, List[FileEntry]]:
    """Return a (mock, copies) pair: a fast_copy() replacement bound to a
    freshly created `copies` list that accumulates every requested copy.
    """
    recorded: List[FileEntry] = []
    bound_mock = functools.partial(fast_copy_mock, tracked_copies=recorded)
    return (bound_mock, recorded)
def mock_fast_copy_in(context: Any) -> Tuple[Callable, List[FileEntry]]:
    """Replace `fast_copy` on the given context with a recording mock.

    Returns the mock and the list that will accumulate its FileEntry records.
    """
    fake_copy, recorded = create_fast_copy_mock_instance()
    context.fast_copy = fake_copy
    return (fake_copy, recorded)
| 33.325 | 79 | 0.731433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 586 | 0.43961 |
ecf56bf4694e10d805e550af2ca7db64ebf0dff8 | 3,091 | py | Python | zenvlib/environmentsettings.py | zoosk/zenv | 5d7a548c7c9cca784f23fdcfd86533eaf1d68811 | [
"Apache-2.0"
] | 14 | 2016-03-10T23:00:16.000Z | 2020-02-14T22:27:41.000Z | zenvlib/environmentsettings.py | zoosk/zenv | 5d7a548c7c9cca784f23fdcfd86533eaf1d68811 | [
"Apache-2.0"
] | null | null | null | zenvlib/environmentsettings.py | zoosk/zenv | 5d7a548c7c9cca784f23fdcfd86533eaf1d68811 | [
"Apache-2.0"
] | null | null | null | # Copyright 2015 Zoosk, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class EnvironmentSettings(object):
    """Read-only snapshot of the ZEnv environment.

    Every environment variable named ZENV_<NAME> becomes an attribute
    <name> (lower-cased). Once __init__ completes, __setattr__ rejects
    all further writes, making instances effectively immutable.
    """
    #: The path to the build properties file
    buildprops = None
    #: The path to the common properties file
    buildprops_common = None
    #: The build command used to start a build in this checkout.
    build_command = None
    #: The command that runs on completion of a build.
    complete_command = None
    #: The path to the current workspace.
    current_work = None
    #: The IP address of the database box.
    dbip = None
    #: The dev ID for this checkout.
    devid = None
    #: The 3-digit, 0-padded devid
    devid_padded = None
    #: The hostname of the dev server for this checkout.
    devserver = None
    #: The command that runs on build failure.
    failed_command = None
    #: Whether or not the user is in ZEnv.
    initialized = False
    #: The user's LDAP username.
    ldap_username = None
    #: The path to the root of the local deploy dir.
    local_deploy_dir = None
    #: The root of the ZEnv checkout.
    root = None
    #: The directories to rsync after a build completes.
    rsync_directories = None
    #: The path to the deployed version of this checkout.
    serverdir = None
    #: The path to the global settings file.
    settings = None
    #: The path to the workspace folder that contains the checkouts.
    workspace = None
    #: The name of the checkout settings file.
    workspace_settings = None
    # Note that other properties will be set on this class based on the actual environment variables that you declare.
    def __init__(self):
        env_vars = os.environ
        # Load all the vars
        # NOTE(review): dict.iteritems() is Python 2 only.
        for key, value in env_vars.iteritems():
            if key.startswith('ZENV_'):
                attr_name = key[5:].lower()
                setattr(self, attr_name, value)
        # Add data types to non-string properties
        self.initialized = (self.initialized == '1')
        if self.rsync_directories is not None:
            self.rsync_directories = self.rsync_directories.split(' ')
        else:
            self.rsync_directories = []
        if self.buildprops_common == '':
            self.buildprops_common = None
        # Must be the LAST assignment: once set, __setattr__ rejects writes.
        self._env_loaded = True
    def __setattr__(self, key, value):
        """ Override __setattr__ to make all properties read-only """
        if hasattr(self, '_env_loaded'):
            raise Exception('Properties of EnvironmentSettings objects are read-only')
        else:
            self.__dict__[key] = value
| 28.88785 | 118 | 0.659657 | 2,482 | 0.802976 | 0 | 0 | 0 | 0 | 0 | 0 | 1,721 | 0.556778 |
ecf908c2677c35a6ed13615d3ef45501f7f7362a | 841 | py | Python | brownbags/urls.py | openkawasaki/brownbag-django | ecdd4d2233a77922ead14afcaec289d4a0f43a1b | [
"MIT"
] | 2 | 2020-04-18T12:36:00.000Z | 2020-07-06T03:32:42.000Z | brownbags/urls.py | openkawasaki/brownbag-django | ecdd4d2233a77922ead14afcaec289d4a0f43a1b | [
"MIT"
] | 16 | 2020-04-12T13:24:26.000Z | 2020-04-12T15:54:40.000Z | brownbags/urls.py | openkawasaki/brownbag-django | ecdd4d2233a77922ead14afcaec289d4a0f43a1b | [
"MIT"
] | 3 | 2020-04-13T13:56:02.000Z | 2020-07-06T03:32:51.000Z | from django.urls import include, path
from . import views
from django.views.generic.base import RedirectView
from rest_framework import routers
from . import apis
# Application namespace (used for URL reversing, e.g. 'brownbags:index').
app_name = 'brownbags'
# Page views plus the hand-written API endpoints.
urlpatterns = [
    path('', views.index, name='index'),
    path('edit/', views.edit, name='edit'),
    path('api/v1.0/shop/list/', apis.shop_list.as_view(), name='apis_shop_list'),
    path('api/v1.0/shop/', apis.shop.as_view(), name='apis_shop'),
    #path('api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
# NOTE(review): `routers` was already imported at the top of this file; the
# re-import below is redundant but harmless.
from rest_framework import routers
from .apis import ShopViewSet, ImageDataViewSet
#---------------------------------------------
router = routers.DefaultRouter()
router.register('api/1.0/data/shop', ShopViewSet)
router.register('api/1.0/data/image', ImageDataViewSet)
# Append the DRF router's generated routes to the explicit patterns above.
urlpatterns += router.urls
ecf94df74b0b0d95d2234dabb351bb41517b8183 | 5,642 | py | Python | scripts/preprocessing/setup_reads.py | shunhuahan/mcclintock | 999f064847e824d41a76791c913e24454ef6cba8 | [
"Unlicense"
] | null | null | null | scripts/preprocessing/setup_reads.py | shunhuahan/mcclintock | 999f064847e824d41a76791c913e24454ef6cba8 | [
"Unlicense"
] | null | null | null | scripts/preprocessing/setup_reads.py | shunhuahan/mcclintock | 999f064847e824d41a76791c913e24454ef6cba8 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
import os
import sys
import subprocess
import traceback
from datetime import datetime
try:
sys.path.append(snakemake.config['args']['mcc_path'])
import scripts.mccutils as mccutils
import config.preprocessing.trimgalore as trimgalore
from Bio import SeqIO
except Exception as e:
track = traceback.format_exc()
print(track, file=sys.stderr)
print("ERROR...unable to locate required external scripts at: "+snakemake.config['args']['mcc_path']+"/scripts/ or "+snakemake.config['args']['mcc_path']+"/config/preprocessing/", file=sys.stderr)
sys.exit(1)
class fileFormatError(Exception):
    """Raised when an input FASTQ file fails validation.

    The human-readable reason is kept on `.message` and also forwarded to
    Exception, so str(exc) and tracebacks show it.
    """
    def __init__(self, message):
        # The original stored .message but never called Exception.__init__,
        # leaving str(exc) empty; forward the message to the base class.
        super().__init__(message)
        self.message = message
def main():
    """Prepare input reads for McClintock: validate, optionally trim, and
    place unzipped copies at the snakemake-declared output paths.

    Reads all of its configuration from the `snakemake` global that Snakemake
    injects when running this script. On any failure the partially written
    outputs are removed and the process exits with status 1.
    """
    fq1 = snakemake.input.fq1
    fq2 = snakemake.params.fq2
    methods = snakemake.params.methods.split(",")
    processors = snakemake.threads
    mcc_out = snakemake.params.out
    run_id = snakemake.params.run_id
    log = snakemake.params.log
    # now = datetime.now()
    # start = now.strftime("%Y-%m-%d %H:%M:%S")
    mccutils.log("processing", "prepping reads for McClintock")
    # trims adaptors of input fastq(s)
    trimmedfq = fq1
    trimmedfq2 = fq2
    try:
        check_fastqs(fq1, fq2, mcc_out, min_length=30, log=log)
        if "trimgalore" in methods:
            mccutils.log("processing", "running trim_galore", log=log)
            # fq2 == "None" (the string) marks single-end input.
            if fq2 == "None":
                flags = trimgalore.SINGLE_END_FLAGS
                trimmedfq = run_trim_galore(fq1, run_id, log, mcc_out, cores=processors, flags=flags)
            else:
                flags = trimgalore.PAIRED_END_FLAGS
                trimmedfq, trimmedfq2 = run_trim_galore(fq1, run_id, log, mcc_out, fq2=fq2, cores=processors, flags=flags)
            run_multiqc(mcc_out+"/results/trimgalore/")
        # make unzipped copies in mcc input dir
        make_copies(trimmedfq, trimmedfq2, snakemake.output[0], snakemake.output[1])
        # removes trimmed read files from trimgalore directory
        if trimmedfq != fq1:
            mccutils.remove(trimmedfq)
        if trimmedfq2 != fq2:
            mccutils.remove(trimmedfq2)
    except Exception as e:
        track = traceback.format_exc()
        print(track, file=sys.stderr)
        print("ERROR processing of FastQ files failed...check that your FastQ files are formatted correctly...Exiting...", file=sys.stderr)
        # Clean up any partial outputs so snakemake does not see stale files.
        mccutils.remove(snakemake.output[0])
        mccutils.remove(snakemake.output[1])
        sys.exit(1)
    # now = datetime.now()
    # end = now.strftime("%Y-%m-%d %H:%M:%S")
    # mccutils.log("setup_reads", "start: "+start)
    # mccutils.log("setup_reads", "end: "+end)
    mccutils.log("processing", "read setup complete")
def make_copies(fq1, fq2, fq1copy, fq2copy):
    """Place (decompressed) working copies of the FASTQ inputs at the given
    destination paths and return those paths.

    Inputs whose last extension contains "gz" are decompressed with zcat;
    others are copied verbatim. fq2 == "None" (single-end) produces an empty
    placeholder file for the second output.
    """
    def _copy(src, dest):
        if "gz" in src.split(".")[-1]:
            mccutils.run_command_stdout(["zcat",src], dest)
        else:
            mccutils.run_command(["cp", src, dest])
    _copy(fq1, fq1copy)
    if fq2 == "None":
        mccutils.run_command(["touch", fq2copy])
    else:
        _copy(fq2, fq2copy)
    return fq1copy, fq2copy
def has_valid_read_lengths(fq1, fq2, min_length=30, paired=False):
    """Ensure each FASTQ contains at least one read of min_length or longer.

    Checks fq1 only for single-end data, both files when paired=True.
    Raises fileFormatError naming the offending file (1-based) otherwise.
    """
    targets = [fq1, fq2] if paired else [fq1]
    for idx, fastq in enumerate(targets):
        long_enough = any(
            len(str(record.seq)) >= min_length
            for record in SeqIO.parse(fastq, "fastq")
        )
        if not long_enough:
            raise fileFormatError("fastq "+str(idx+1)+" lacks any reads >= the minimum length of:"+str(min_length))
def has_valid_read_ids(fq1, fq2, log=None):
    """Validate paired FASTQ read IDs with the external `fastq_info` tool.

    Raises fileFormatError pointing at the log file if validation fails.
    """
    validation_ok = mccutils.run_command(["fastq_info", fq1, fq2], log=log, fatal=False)
    if validation_ok:
        return
    raise fileFormatError("Paired fastq files failed validation, see: "+log+" for details")
def check_fastqs(fq1, fq2, out, min_length=30, log=None):
    """Validate the input FASTQ file(s) before any processing is done.

    Makes temporary decompressed copies under out/tmp, checks read lengths,
    and (for paired data) cross-validates the read IDs of the two files.
    """
    mccutils.mkdir(out+"/tmp")
    # The literal string "None" for fq2 marks single-end input.
    paired = (fq2 != "None")
    fq1, fq2 = make_copies(fq1, fq2, out+"/tmp/tmp_val_fq_1.fq", out+"/tmp/tmp_val_fq_2.fq")
    has_valid_read_lengths(fq1, fq2, min_length=min_length, paired=paired)
    if paired:
        has_valid_read_ids(fq1, fq2, log=log)
def run_trim_galore(fq1, run_id, log, out, fq2=None, cores=1, flags=None):
    """Run trim_galore on one or two FASTQ files and return the trimmed path(s).

    Args:
        fq1: path to the (first) input FASTQ.
        run_id: run identifier (currently unused; kept for interface compatibility).
        log: log file passed through to mccutils.run_command.
        out: McClintock output dir; results land in out+"/results/trimgalore".
        fq2: optional mate FASTQ enabling paired-end mode.
        cores: number of trim_galore threads (-j).
        flags: extra trim_galore command-line flags.

    Returns:
        Single-end: the trimmed FASTQ path.
        Paired-end: a (trimmed_fq1, trimmed_fq2) tuple.
    """
    # Use None instead of a mutable [] default to avoid the shared-default pitfall.
    if flags is None:
        flags = []
    mccutils.mkdir(out+"/results/")
    result_dir = out+"/results/trimgalore"
    command = ['trim_galore'] + flags + ["-j", str(cores), "-o", result_dir]
    if fq2 is None:
        command.append(fq1)
    else:
        command += [fq1, fq2]
    mccutils.run_command(command, log=log)
    if fq2 is None:
        outfq = ""
        for f in os.listdir(result_dir):
            if "_trimmed.fq" in f:
                outfq = result_dir+"/"+f
        # Validates that trim_galore actually produced an output file
        # (presumably errors via mccutils when it did not -- TODO confirm).
        mccutils.check_file_exists(outfq)
        return outfq
    else:
        outfq1 = ""
        outfq2 = ""
        for f in os.listdir(result_dir):
            if "_val_1.fq" in f:
                outfq1 = result_dir+"/"+f
            elif "_val_2.fq" in f:
                outfq2 = result_dir+"/"+f
        mccutils.check_file_exists(outfq1)
        mccutils.check_file_exists(outfq2)
        return outfq1, outfq2
def run_multiqc(trimgalore_dir):
    """Run MultiQC over the trim_galore results directory.

    NOTE(review): os.chdir permanently changes the process working directory
    for the rest of the run -- confirm downstream steps use absolute paths.
    """
    os.chdir(trimgalore_dir)
    mccutils.run_command(["multiqc","."])
# Run the full read-prep pipeline when executed as a script.
if __name__ == "__main__":
    main()
| 32.056818 | 200 | 0.619107 | 106 | 0.018788 | 0 | 0 | 0 | 0 | 0 | 0 | 1,233 | 0.21854 |
ecfb9de92beb70f7a442a13faf05ee0506dba658 | 1,521 | py | Python | exemplo.py | efbrasil/B3LOB | 228962f66f8b3118c1a6aac703960c7c21ace097 | [
"MIT"
] | null | null | null | exemplo.py | efbrasil/B3LOB | 228962f66f8b3118c1a6aac703960c7c21ace097 | [
"MIT"
] | null | null | null | exemplo.py | efbrasil/B3LOB | 228962f66f8b3118c1a6aac703960c7c21ace097 | [
"MIT"
] | null | null | null | from B3LOB import Lob
import numpy as np
import os
from datetime import datetime
import matplotlib.pyplot as plt
# Build the PETR3 limit order book from one day of B3 order files.
lob = Lob(datadir='/home/eduardo/MarketData/')
fnames = ['OFER_CPA_20191127.gz', 'OFER_VDA_20191127.gz']
lob.read_orders_from_files('PETR3', fnames)
# Take a book snapshot every 60 seconds while replaying the orders.
lob.set_snapshot_freq(60)
lob.process_orders()
# Each snapshot is a (timestamp, data) pair; pick the 15:00 snapshot.
snap_times = [e[0] for e in lob.snapshots]
idx_15h = snap_times.index(datetime(2019, 11, 27, 15, 0))
snap_15h = lob.snapshots[idx_15h][1]
# Keep price levels until the cumulative size first exceeds this threshold.
size = 1000
# Row 0 of the book array is used as sizes, row 1 as prices; the *100 / /100
# scaling assumes lots of 100 shares and prices in cents -- TODO confirm.
bb = snap_15h['buy_snapshot']['book']
buy_idx = np.where(np.cumsum(bb[0,:]) > size)[0][0] + 1
buy_sizes = bb[0,0:buy_idx] * 100
buy_sizes_cum = np.cumsum(buy_sizes)
buy_prices = bb[1,0:buy_idx] / 100
sb = snap_15h['sell_snapshot']['book']
sell_idx = np.where(np.cumsum(sb[0, :]) > size)[0][0] + 1
sell_sizes = sb[0, 0:sell_idx] * 100
sell_sizes_cum = np.cumsum(sell_sizes)
sell_prices = sb[1, 0:sell_idx] / 100
# Plot per-level sizes (left) and cumulative depth (right).
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(16, 6))
ax1.bar(buy_prices, buy_sizes, label='Ordens de Compra',
        color='steelblue', width=0.008)
ax1.bar(sell_prices, sell_sizes, label='Ordens de Venda',
        color='tomato', width=0.008)
ax1.legend(loc='lower left')
ax1.set_title('Livro de ordens da PETR3 às 15h')
ax2.bar(buy_prices, buy_sizes_cum,
        label='Ordens de Compra',
        color='steelblue', width=0.008)
ax2.bar(sell_prices, sell_sizes_cum,
        label='Ordens de Venda',
        color='tomato', width=0.008)
ax2.legend(loc='lower left')
ax2.set_title('Livro de ordens (acumulado) da PETR3 às 15h')
fig.show()
| 29.823529 | 60 | 0.70217 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 331 | 0.217334 |
ecfc0d21e03b429f3f05fc69177ca324657bae11 | 52,954 | py | Python | siptrackweb/forms.py | lalusvipi/siptrackweb | e4d2882595a40d3dbb1bb74c1838937988726d8e | [
"BSD-2-Clause"
] | 38 | 2015-03-18T08:05:35.000Z | 2021-11-09T10:57:54.000Z | siptrackweb/forms.py | lalusvipi/siptrackweb | e4d2882595a40d3dbb1bb74c1838937988726d8e | [
"BSD-2-Clause"
] | 29 | 2015-06-25T11:28:07.000Z | 2019-11-30T21:15:30.000Z | siptrackweb/forms.py | lalusvipi/siptrackweb | e4d2882595a40d3dbb1bb74c1838937988726d8e | [
"BSD-2-Clause"
] | 13 | 2015-03-18T06:57:46.000Z | 2021-06-22T10:38:49.000Z | from django import forms
class EmptyForm(forms.Form):
    # Intentionally has no fields; used by views that require a form object.
    pass
class LoginForm(forms.Form):
    """Username/password credentials for logging in."""
    username = forms.CharField(label='Username', max_length=50)
    password = forms.CharField(label='Password', max_length=32,
                               widget=forms.PasswordInput(), required=True)
class DeleteForm(forms.Form):
    """Hidden flag confirming that a delete request was intentional."""
    verify = forms.CharField(widget=forms.HiddenInput(), initial='true')
class ConfirmForm(forms.Form):
    """Hidden flag confirming that an action was intentional."""
    verify = forms.CharField(widget=forms.HiddenInput(), initial='true')
class ViewAddForm(forms.Form):
    """Name and optional description for a new view."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class ViewUpdateForm(forms.Form):
    """Edit an existing view's name and description."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class ViewSearchForm(forms.Form):
    """Free-text search box rendered with a fixed element id."""
    searchstring = forms.CharField(
        max_length=50, required=True,
        widget=forms.TextInput(attrs={'id': 'searchbox'}))
class ViewAdvancedSearchForm(forms.Form):
    """Attribute/value search with a selectable set of result node types."""
    searchAttribute = forms.CharField(max_length=50, required=True)
    searchValue = forms.CharField(max_length=50, required=False)
    attributesList = forms.CharField(max_length=256, required=False)
    OPTIONS = (
        ('devices', 'devices'),
        ('device categories', 'device categories'),
        ('passwords', 'passwords'),
        ('password categories', 'password categories'),
        ('networks', 'networks'),
    )
    displayTypes = forms.MultipleChoiceField(choices=OPTIONS, required=False)
class NetworkTreeAddForm(forms.Form):
    """Create a network tree using either the ipv4 or ipv6 protocol."""
    name = forms.CharField(label='Name', max_length=50)
    protocol = forms.ChoiceField(
        label='Protocol', choices=(('ipv4', 'ipv4'), ('ipv6', 'ipv6')))
class NetworkAddForm(forms.Form):
    """Add a network/address given in CIDR notation."""
    name = forms.CharField(
        label='Address', max_length=50,
        help_text='The network/address in CIDR form (x.x.x.x or x.x.x.x/xx)')
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class NetworkRangeAddForm(forms.Form):
    """Add a network range with an optional description."""
    range = forms.CharField(label='Range', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class NetworkDeleteForm(forms.Form):
    """Delete a network, optionally recursing into subnetworks."""
    recursive = forms.BooleanField(required=False, label='Recursive delete')
class PasswordKeyAddForm(forms.Form):
    """Create a password key, optionally protected by a passphrase."""
    name = forms.CharField(label='Name', max_length=50)
    key = forms.CharField(label='Key', max_length=32,
                          widget=forms.PasswordInput(), required=False)
    validate = forms.CharField(label='Key (again)', max_length=32,
                               widget=forms.PasswordInput(), required=False)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class CounterAddBasicForm(forms.Form):
    """Name and optional description for a new basic counter."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class CounterAddLoopingForm(forms.Form):
    """New looping counter that cycles through a fixed list of values."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
    values = forms.CharField(
        label='Values', max_length=5000, help_text='one value per row',
        widget=forms.Textarea(attrs={'cols': '30', 'rows': '5'}))
class CounterUpdateBasicForm(forms.Form):
    """Edit a basic counter's metadata and current value."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
    value = forms.DecimalField(label='Value', min_value=0, decimal_places=0)
class CounterUpdateLoopingForm(forms.Form):
    """Edit a looping counter: metadata, current value and value list."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
    value = forms.CharField(label='Value', max_length=50)
    values = forms.CharField(
        label='Values', max_length=5000, help_text='one value per row',
        widget=forms.Textarea(attrs={'cols': '30', 'rows': '5'}))
class CounterSetForm(forms.Form):
    """Set a counter to an explicit non-negative integer value."""
    value = forms.DecimalField(label='Value', min_value=0, decimal_places=0)
class PasswordAddForm(forms.Form):
    """Add a password, encrypted with one of the available password keys."""
    pw_username = forms.CharField(label='Username', max_length=50,
                                  required=False)
    pw_password = forms.CharField(
        label='Password', max_length=250, required=False,
        widget=forms.PasswordInput(),
        help_text='Max length: 250, leave empty for generated password.')
    validate = forms.CharField(label='Password (again)', max_length=250,
                               widget=forms.PasswordInput(), required=False)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)

    def __init__(self, password_keys, *args, **kwargs):
        super(PasswordAddForm, self).__init__(*args, **kwargs)
        # 'None' sentinel is always offered; the default key (if any) is
        # moved to the front so it becomes the pre-selected choice.
        choices = [('__no-password-key__', 'None')]
        for key in password_keys:
            entry = (key.oid, key.attributes['name'])
            if key.attributes.get('default', False) is True:
                choices.insert(0, entry)
            else:
                choices.append(entry)
        self.fields['passwordkey'] = forms.ChoiceField(label='Password key',
                                                       choices=choices)
class PasswordUpdateForm(forms.Form):
    """Edit a password; mirrors PasswordAddForm."""
    pw_username = forms.CharField(label='Username', max_length=50,
                                  required=False)
    pw_password = forms.CharField(
        label='Password', max_length=250, required=False,
        widget=forms.PasswordInput(),
        help_text='Max length: 250, leave empty for generated password.')
    validate = forms.CharField(label='Password (again)', max_length=250,
                               widget=forms.PasswordInput(), required=False)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)

    def __init__(self, password_keys, *args, **kwargs):
        super(PasswordUpdateForm, self).__init__(*args, **kwargs)
        # Default key first, 'None' sentinel always available.
        choices = [('__no-password-key__', 'None')]
        for key in password_keys:
            entry = (key.oid, key.attributes['name'])
            if key.attributes.get('default', False) is True:
                choices.insert(0, entry)
            else:
                choices.append(entry)
        self.fields['passwordkey'] = forms.ChoiceField(label='Password key',
                                                       choices=choices)
class DeviceTemplateAddForm(forms.Form):
    """Create a device template, optionally inheriting other templates."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this template.',
                                  required=False)
    inheritance_only = forms.BooleanField(
        label='Inheritance only', required=False, initial=False,
        help_text='Template is used for inheritance only.')
    device_creation = forms.BooleanField(
        label='Device creation', required=False, initial=False,
        help_text='Template is used for device creation.')

    def __init__(self, templates, *args, **kwargs):
        super(DeviceTemplateAddForm, self).__init__(*args, **kwargs)
        # Typo fixed: '[UKNOWN]' -> '[UNKNOWN]' (matches TemplateSelectForm).
        choices = [(template.oid, template.attributes.get('name', '[UNKNOWN]'))
                   for template in templates]
        self.fields['inherited_templates'] = forms.MultipleChoiceField(
            required=False, label='Inherited templates', choices=choices)
class NetworkTemplateAddForm(forms.Form):
    """Create a network template, optionally inheriting other templates."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this template.',
                                  required=False)
    inheritance_only = forms.BooleanField(
        label='Inheritance only', required=False, initial=False,
        help_text='Template is used for inheritance only.')

    def __init__(self, templates, *args, **kwargs):
        super(NetworkTemplateAddForm, self).__init__(*args, **kwargs)
        # Typo fixed: '[UKNOWN]' -> '[UNKNOWN]' (matches TemplateSelectForm).
        choices = [(template.oid, template.attributes.get('name', '[UNKNOWN]'))
                   for template in templates]
        self.fields['inherited_templates'] = forms.MultipleChoiceField(
            required=False, label='Inherited templates', choices=choices)
class DeviceTemplateUpdateForm(forms.Form):
    """Edit a device template; mirrors DeviceTemplateAddForm."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this template.',
                                  required=False)
    inheritance_only = forms.BooleanField(
        label='Inheritance only', required=False, initial=False,
        help_text='Template is used for inheritance only.')
    device_creation = forms.BooleanField(
        label='Device creation', required=False, initial=False,
        help_text='Template is used for device creation.')

    def __init__(self, templates, *args, **kwargs):
        super(DeviceTemplateUpdateForm, self).__init__(*args, **kwargs)
        # Typo fixed: '[UKNOWN]' -> '[UNKNOWN]' (matches TemplateSelectForm).
        choices = [(template.oid, template.attributes.get('name', '[UNKNOWN]'))
                   for template in templates]
        self.fields['inherited_templates'] = forms.MultipleChoiceField(
            required=False, label='Inherited templates', choices=choices)
class NetworkTemplateUpdateForm(forms.Form):
    """Edit a network template; mirrors NetworkTemplateAddForm."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this template.',
                                  required=False)
    inheritance_only = forms.BooleanField(
        label='Inheritance only', required=False, initial=False,
        help_text='Template is used for inheritance only.')

    def __init__(self, templates, *args, **kwargs):
        super(NetworkTemplateUpdateForm, self).__init__(*args, **kwargs)
        # Typo fixed: '[UKNOWN]' -> '[UNKNOWN]' (matches TemplateSelectForm).
        choices = [(template.oid, template.attributes.get('name', '[UNKNOWN]'))
                   for template in templates]
        self.fields['inherited_templates'] = forms.MultipleChoiceField(
            required=False, label='Inherited templates', choices=choices)
class TemplateRuleTextAddForm(forms.Form):
    """Rule that prompts for a free-text attribute when a template is used."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to create.')
    hidden = forms.BooleanField(
        label='Hide attribute', required=False, initial=False,
        # Grammar fixed: 'will hidden' -> 'will be hidden'.
        help_text='If true, the attribute will be hidden per default if it is large/wikitext.')
    important = forms.BooleanField(
        label='Important attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed on the device/entity overview page.')
    large = forms.BooleanField(
        label='Large attribute', required=False, initial=False,
        help_text='If true, the attribute will have a separate display box.')
    wikitext = forms.BooleanField(
        label='Wikitext attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed using wikitext parsing, implies "large".')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='Number of stored versions of the attribute.')
class TemplateRuleFixedAddForm(forms.Form):
    """Rule that sets an attribute to a fixed value when a template is used."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to create.')
    string_value = forms.CharField(max_length=100, label='String value',
                                   help_text='The created attributes value.')
    variable_expansion = forms.BooleanField(label='Expand variables',
                                            required=False, initial=False)
    important = forms.BooleanField(
        label='Important attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed on the device/entity overview page.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='Number of stored versions of the attribute.')
class TemplateRuleRegmatchAddForm(forms.Form):
    """Rule whose attribute value must match a regular expression."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to create.')
    regexp = forms.CharField(
        max_length=50, label='Regexp',
        help_text='Regular expression that must match the input value.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='Number of stored versions of the attribute.')
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(
        label='Important attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleBoolAddForm(forms.Form):
    """Rule that creates a boolean attribute with a chosen default."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to create.')
    default = forms.ChoiceField(
        label='Default', choices=(('true', 'True'), ('false', 'False')),
        help_text='Default value for attribute.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='Number of stored versions of the attribute.')
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(
        label='Important attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleIntAddForm(forms.Form):
    """Rule that creates an integer attribute with a chosen default."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to create.')
    default = forms.IntegerField(label='Default', initial=0,
                                 help_text='Default value.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='Number of stored versions of the attribute.')
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
    important = forms.BooleanField(
        label='Important attribute', required=False, initial=False,
        help_text='If true, the attribute will be displayed on the device/entity overview page.')
class TemplateRuleDeleteAttributeAddForm(forms.Form):
    """Rule that removes a named attribute when a template is applied."""
    attr_name = forms.CharField(max_length=50, label='Attribute name',
                                help_text='Name of attribute to delete.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
class TemplateRuleFlushNodesAddForm(forms.Form):
    """Rule that flushes existing child nodes, filtered by node type."""
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')

    def __init__(self, node_types, *args, **kwargs):
        super(TemplateRuleFlushNodesAddForm, self).__init__(*args, **kwargs)
        choices = [(node_type, node_type) for node_type in node_types]
        self.fields['include'] = forms.MultipleChoiceField(
            required=False, label='Included node types', choices=choices,
            help_text='If no node types are chosen for include, all types will match.')
        self.fields['exclude'] = forms.MultipleChoiceField(
            required=False, label='Excluded node types', choices=choices)
class TemplateRuleFlushAssociationsAddForm(forms.Form):
    """Rule that flushes existing associations, filtered by node type."""
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')

    def __init__(self, node_types, *args, **kwargs):
        super(TemplateRuleFlushAssociationsAddForm, self).__init__(*args, **kwargs)
        choices = [(node_type, node_type) for node_type in node_types]
        self.fields['include'] = forms.MultipleChoiceField(
            required=False, label='Included node types', choices=choices,
            help_text='If no node types are chosen for include, all types will match.')
        self.fields['exclude'] = forms.MultipleChoiceField(
            required=False, label='Excluded node types', choices=choices)
class TemplateRulePasswordAddForm(forms.Form):
    """Rule that creates a password (with optional username) on the target."""
    username = forms.CharField(max_length=50, label='Username', required=False)
    passwd_description = forms.CharField(
        max_length=50, label='Description', required=False,
        help_text='Description of the added password.')
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')

    def __init__(self, password_keys, *args, **kwargs):
        super(TemplateRulePasswordAddForm, self).__init__(*args, **kwargs)
        choices = [('__no-password-key__', 'None')]
        choices.extend((key.oid, key.attributes['name'])
                       for key in password_keys)
        self.fields['passwordkey'] = forms.ChoiceField(label='Password key',
                                                       choices=choices)
class TemplateRuleSubdeviceAddForm(forms.Form):
    """Rule that creates N subdevices, optionally from another template."""
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    num_devices = forms.IntegerField(
        label='Number of subdevices', min_value=1, initial=1,
        help_text='Number of subdevices to create.')
    sequence_offset = forms.IntegerField(
        label='Sequence offset', initial=0,
        help_text='Base offset of sequence counter used when applying subdevice templates.')
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')

    def __init__(self, templates, *args, **kwargs):
        super(TemplateRuleSubdeviceAddForm, self).__init__(*args, **kwargs)
        choices = [('none', 'None')]
        choices.extend((template.oid, template.attributes['name'])
                       for template in templates)
        self.fields['template'] = forms.ChoiceField(label='Template',
                                                    choices=choices)
class TemplateRuleAssignNetworkAddForm(forms.Form):
    """Rule that auto-assigns a network address when a template is applied."""
    description = forms.CharField(max_length=80, label='Description',
                                  help_text='Description of this rule.',
                                  required=False)
    priority = forms.IntegerField(
        label='Priority', min_value=0, initial=10,
        help_text='The priority of this rule when using the templates, lower value will be displayed first.')
class NetworkAttributeAddSelectTypeForm(forms.Form):
    """Pick the type of a new network attribute (text or boolean)."""
    ruletype = forms.ChoiceField(
        label='Attribute type',
        choices=(('text', 'text'), ('bool', 'boolean')))
class AttributeAddSelectTypeForm(forms.Form):
    """Pick the type of a new attribute (text, boolean or int)."""
    ruletype = forms.ChoiceField(
        label='Attribute type',
        choices=(('text', 'text'), ('bool', 'boolean'), ('int', 'int')))
class AttributeUpdateTextForm(forms.Form):
    """Edit the value of a text attribute."""
    value = forms.CharField(label='New value', max_length=50, required=False)
class AttributeUpdateBoolForm(forms.Form):
    """Edit the value of a boolean attribute."""
    value = forms.BooleanField(required=False,
                               label='New value (true/false)')
class AttributeUpdateIntForm(forms.Form):
    """Edit the value of an integer attribute."""
    value = forms.IntegerField(initial=0, label='New value')
class AttributeUpdateLargeTextForm(forms.Form):
    """Edit a large text attribute in a textarea pre-filled with its value."""
    def __init__(self, attribute, *args, **kwargs):
        super(AttributeUpdateLargeTextForm, self).__init__(*args, **kwargs)
        self.fields['value'] = forms.CharField(
            max_length=5000,
            label=attribute.name,
            initial=attribute.value,
            required=False,
            widget=forms.Textarea(attrs={'cols': '100', 'rows': '20'}))
class AttributeAddTextForm(forms.Form):
    """Add a free-text attribute to a node."""
    name = forms.CharField(
        max_length=50, label='Name',
        widget=forms.TextInput(attrs={'placeholder': 'Name'}))
    value = forms.CharField(
        max_length=50, label='Value', required=False,
        widget=forms.TextInput(attrs={'placeholder': 'Value'}))
    # Fixed marker telling the view which attribute type was submitted.
    ruletype = forms.CharField(initial='text', widget=forms.HiddenInput())
    large = forms.BooleanField(
        label='Large attribute', required=False,
        help_text='Attribute will have a separate display box.')
    wikitext = forms.BooleanField(
        label='Wikitext attribute', required=False,
        help_text='Attribute will be displayed using textile wikitext parsing, implies "large".')
    hidden = forms.BooleanField(
        label='Hidden attribute', required=False,
        # Grammar fixed: 'will hidden' -> 'will be hidden'.
        help_text='Attribute will be hidden per default if it is large/wikitext.')
    important = forms.BooleanField(
        label='Important attribute', required=False,
        help_text='Attribute will be displayed on a device/entities overview page.')
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='If set to > 1 a versioned attribute will be created.')
class PasswordAttributeAddTextForm(AttributeAddTextForm):
    """AttributeAddTextForm plus an option to encrypt the attribute."""
    encrypted = forms.BooleanField(
        required=False, label='Encrypted attribute',
        help_text='Attribute will be encrypted using the same key as the parent password.')
class AttributeAddBoolForm(forms.Form):
    """Add a boolean attribute to a node."""
    name = forms.CharField(max_length=50, label='Name')
    value = forms.ChoiceField(label='Value',
                              choices=(('true', 'True'), ('false', 'False')))
    ruletype = forms.CharField(initial='bool', widget=forms.HiddenInput())
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='If set to > 1 a versioned attribute will be created.')
    important = forms.BooleanField(
        label='Important attribute', required=False,
        help_text='If true, the attribute will be displayed on a device/entities overview page.')
class AttributeAddIntForm(forms.Form):
    """Add an integer attribute to a node."""
    name = forms.CharField(max_length=50, label='Name')
    value = forms.IntegerField(initial=0, label='Integer Value')
    ruletype = forms.CharField(initial='int', widget=forms.HiddenInput())
    versions = forms.IntegerField(
        label='Versions', min_value=1, initial=1,
        help_text='If set to > 1 a versioned attribute will be created.')
    important = forms.BooleanField(
        label='Important attribute', required=False,
        help_text='If true, the attribute will be displayed on a device/entities overview page.')
class DeviceCategoryAddForm(forms.Form):
    """Name and optional description for a new device category."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class DeviceCategoryUpdateForm(forms.Form):
    """Edit a device category's name and description."""
    name = forms.CharField(label='Name', max_length=50)
    description = forms.CharField(label='Description', max_length=100,
                                  required=False)
class TemplateSelectForm(forms.Form):
    """Choose one template from a list, optionally including 'None'."""
    def __init__(self, templates, permit_none=True, *args, **kwargs):
        super(TemplateSelectForm, self).__init__(*args, **kwargs)
        choices = [(-1, 'None')] if permit_none else []
        choices.extend((template.oid,
                        template.attributes.get('name', '[UNKNOWN]'))
                       for template in templates)
        self.fields['template'] = forms.ChoiceField(label='Select template',
                                                    choices=choices)
class TemplateSetForm(forms.Form):
    """Dynamic form for applying a template.

    One field per template rule, keyed 'argument-<rule oid>'.  Rules that
    take direct user input (text, regmatch, bool, int, subdevice) are added
    first, ordered by their 'priority' attribute (lower first).  The
    remaining rules are appended afterwards as hidden or read-only fields.
    """
    def __init__(self, template, *args, **kwargs):
        super(TemplateSetForm, self).__init__(*args, **kwargs)
        rules = list(template.combinedRules())
        # key= sort instead of the python-2-only cmp=/cmp() form; sort is
        # stable, so equal-priority rules keep their original order.
        rules.sort(key=lambda rule: rule.attributes.get('priority', 10))
        for rule in rules:
            field = None
            if rule.class_name == 'template rule text':
                # Wikitext text rules are handled in the second pass below.
                if not rule.attributes.get('wikitext', False):
                    field = forms.CharField(
                        max_length=50, label=rule.attr_name, required=False,
                        help_text=rule.attributes.get('description', None))
            elif rule.class_name == 'template rule regmatch':
                if rule.attributes.get('description', None):
                    help_text = '%s (must match: %s)' % (
                        rule.attributes.get('description'),
                        rule.regexp
                    )
                else:
                    help_text = 'Must match: "%s"' % (rule.regexp)
                field = forms.RegexField(
                    max_length=50, label=rule.attr_name, regex=rule.regexp,
                    required=False, help_text=help_text)
            elif rule.class_name == 'template rule bool':
                field = forms.BooleanField(
                    label=rule.attr_name, required=False,
                    initial=rule.default_value,
                    help_text=rule.attributes.get('description', None))
            elif rule.class_name == 'template rule int':
                field = forms.IntegerField(
                    label=rule.attr_name, initial=rule.default_value,
                    help_text=rule.attributes.get('description', None))
            elif rule.class_name == 'template rule subdevice':
                field = forms.IntegerField(
                    label='Number of subdevices', required=False,
                    initial=rule.num_devices,
                    help_text=rule.attributes.get('description', None))
            if field:
                self.fields['argument-%s' % (rule.oid)] = field
        # Second pass: rules without direct user input.  Iterated unsorted
        # so the resulting field order matches the template's rule order.
        for rule in template.combinedRules():
            if rule.class_name in [
                'template rule regmatch', 'template rule bool',
                'template rule int', 'template rule subdevice']:
                continue
            if rule.class_name == 'template rule text':
                if rule.attributes.get('wikitext', False):
                    field = forms.CharField(
                        max_length=50, label=rule.attr_name, required=False,
                        widget=forms.HiddenInput(),
                        help_text=rule.attributes.get('description', None))
                else:
                    continue
            elif rule.class_name == 'template rule password':
                if rule.username:
                    initial = '%s' % (rule.username)
                else:
                    initial = '[no username]'
                if rule.description:
                    initial = '%s - %s' % (initial, rule.description)
                field = forms.CharField(
                    label='Add password', required=False, initial=initial,
                    widget=forms.TextInput(attrs={'readonly': 'readonly'}),
                    help_text=rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule assign network':
                field = forms.CharField(
                    label='Auto-assign ip-address', required=False,
                    widget=forms.HiddenInput(),
                    help_text=rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule fixed':
                # Note: an unused 'apply_label' local was removed here.
                field = forms.CharField(
                    label=rule.attr_name, required=False, initial=rule.value,
                    widget=forms.TextInput(attrs={'readonly': 'readonly'}),
                    help_text=rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule flush nodes':
                field = forms.CharField(
                    label='Flush existing nodes', required=False,
                    widget=forms.HiddenInput(),
                    help_text=rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule flush associations':
                field = forms.CharField(
                    label='Flush existing associations', required=False,
                    widget=forms.HiddenInput(),
                    help_text=rule.attributes.get('description', ''))
            elif rule.class_name == 'template rule delete attribute':
                field = forms.CharField(
                    label='Delete attribute', required=False,
                    initial=rule.attr_name,
                    widget=forms.TextInput(attrs={'readonly': 'readonly'}),
                    help_text=rule.attributes.get('description', ''))
            else:
                # Unknown rule types are passed through as hidden fields.
                field = forms.CharField(
                    label=rule.class_name, required=False,
                    widget=forms.HiddenInput(),
                    help_text=rule.attributes.get('description', ''))
            self.fields['argument-%s' % (rule.oid)] = field
class DeviceSetValuesForm(forms.Form):
    """One input field per device attribute rule, keyed 'attr-<rule oid>'."""
    def __init__(self, rules, *args, **kwargs):
        super(DeviceSetValuesForm, self).__init__(*args, **kwargs)
        for rule in rules:
            wikitext = rule.attributes.get('wikitext', False)
            field_name = 'attr-%s' % (rule.oid)
            if rule.dtype == 'text' and not wikitext:
                self.fields[field_name] = forms.CharField(
                    max_length=50, label=rule.name, required=False,
                    help_text=rule.attributes.get('description', None))
            elif rule.dtype == 'text' and wikitext:
                # Wikitext values are edited elsewhere; submit a placeholder.
                self.fields[field_name] = forms.CharField(
                    label=rule.name, widget=forms.HiddenInput(), initial=' ')
            elif rule.dtype == 'regmatch':
                self.fields[field_name] = forms.RegexField(
                    max_length=50, label=rule.name, regex=rule.value,
                    required=False,
                    help_text='Must match: "%s"' % (rule.value))
            if rule.dtype == 'bool':
                self.fields[field_name] = forms.BooleanField(
                    label=rule.name, required=False,
                    initial=rule.attributes.get('default', True),
                    help_text=rule.attributes.get('description', None))
class DeviceNetworkAddForm(forms.Form):
    """Attach an IP address, from a chosen network tree, to a device."""
    def __init__(self, network_trees, *args, **kwargs):
        super(DeviceNetworkAddForm, self).__init__(*args, **kwargs)
        # Default tree (if flagged) goes first so it is pre-selected.
        choices = []
        for tree in network_trees:
            entry = (tree.oid, tree.attributes.get('name', '[UNKNOWN]'))
            if tree.attributes.get('default', False) is True:
                choices.insert(0, entry)
            else:
                choices.append(entry)
        self.fields['networktree'] = forms.ChoiceField(
            label='Network Tree', choices=choices,
            help_text='Network tree for address.')
        self.fields['network_name'] = forms.CharField(
            max_length=50, label='IP-Address',
            help_text='Valid forms: host: "a.b.c.d", '
                      'cidr subnet: "a.b.c.d/nn"')
        self.fields['description'] = forms.CharField(
            max_length=50, label='Description (optional)', required=False)
class UserAddForm(forms.Form):
    """Create a user account with an initial password."""
    user_name = forms.CharField(label='User Name', max_length=50)
    real_name = forms.CharField(label='Real Name (optional)', max_length=50,
                                required=False)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
    administrator = forms.BooleanField(label='Administrator',
                                       required=False, initial=False)
    password = forms.CharField(label='Password', max_length=32,
                               widget=forms.PasswordInput(), required=True)
    validate = forms.CharField(label='Password (again)', max_length=32,
                               widget=forms.PasswordInput(), required=True)
class UserUpdateAdminForm(forms.Form):
    """Edit a user's details, including the administrator flag."""
    user_name = forms.CharField(label='User Name', max_length=50)
    real_name = forms.CharField(label='Real Name (optional)', max_length=50,
                                required=False)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
    administrator = forms.BooleanField(label='Administrator',
                                       required=False, initial=False)
class UserUpdateForm(forms.Form):
    """Edit a user's own details (no administrator flag)."""
    user_name = forms.CharField(label='User Name', max_length=50)
    real_name = forms.CharField(label='Real Name (optional)', max_length=50,
                                required=False)
    description = forms.CharField(label='Description (optional)',
                                  max_length=50, required=False)
class UserResetPasswordForm(forms.Form):
    """Force-set a user's password when the old password is unknown."""
    password = forms.CharField(
        max_length=32, label='Password',
        widget=forms.PasswordInput(), required=False,
        # Typo fixed: 'Reseting' -> 'Resetting'.
        help_text='Resetting the password for a user will disconnect all subkeys etc. Use this if the old password for the user is unknown.')
    validate = forms.CharField(max_length=32, label='Password (again)',
                               widget=forms.PasswordInput(), required=False)
class UserUpdatePasswordForm(forms.Form):
    """Change a user's password, optionally supplying the old one."""
    password = forms.CharField(label='New Password', max_length=32,
                               widget=forms.PasswordInput(), required=False)
    validate = forms.CharField(label='New Password (again)', max_length=32,
                               widget=forms.PasswordInput(), required=False)
    old_password = forms.CharField(
        label='Old Password', max_length=32,
        widget=forms.PasswordInput(), required=False,
        help_text='Needs to be supplied if you are changing the password of a user other than your own.')
class UserConnectKeyForm(forms.Form):
    """Connect an existing password key to a user account."""
    password_key_key = forms.CharField(
        max_length=32, label='Password key password',
        widget=forms.PasswordInput(), required=False,
        help_text='Required if the current active user doesn\'t have the selected password key connected.')

    def __init__(self, password_keys, require_user_password, *args, **kwargs):
        super(UserConnectKeyForm, self).__init__(*args, **kwargs)
        # Informational text rendered by the view alongside the form.
        self.message = '''
If you're connecting a password key for another user, keep in mind; that
user must logout and login to siptrack before the key will be connected.
'''
        # Default key (if flagged) goes first so it is pre-selected.
        choices = []
        for key in password_keys:
            entry = (key.oid, key.attributes['name'])
            if key.attributes.get('default', False) is True:
                choices.insert(0, entry)
            else:
                choices.append(entry)
        self.fields['passwordkey'] = forms.ChoiceField(label='Password key',
                                                       choices=choices)
        if require_user_password:
            # Needed to create the user's keypair on first login.
            self.fields['user_password'] = forms.CharField(
                max_length=32,
                label='User\'s password',
                help_text='Required to create the users keypair if they\'ve never logged in before.',
                widget=forms.PasswordInput(),
                required=False)
class UserManagerLocalAddForm(forms.Form):
    """Create a user manager backed by the local user store."""

    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=50, label='Description (optional)',
                                  required=False)


class UserManagerLDAPAddForm(forms.Form):
    """Create a user manager that authenticates against an LDAP server."""

    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=50, label='Description (optional)',
                                  required=False)
    connection_type = forms.ChoiceField(
        label='Connection type',
        choices=(('ldap', 'ldap'), ('ldaps', 'ldaps')))
    server = forms.CharField(max_length=256, label='LDAP server')
    port = forms.CharField(max_length=5, label='LDAP server port')
    base_dn = forms.CharField(max_length=128, label='Base DN')
    # Typo fixed in user-facing text: "seperate" -> "separate".
    valid_groups = forms.CharField(
        max_length=1000, label='Valid LDAP group',
        help_text='Only members of the given group will be able to log in, use ":" to separate groups.',
        required=False)


class UserManagerActiveDirectoryAddForm(forms.Form):
    """Create a user manager that authenticates against Active Directory."""

    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=50, label='Description (optional)',
                                  required=False)
    server = forms.CharField(max_length=256, label='AD server')
    base_dn = forms.CharField(max_length=128, label='Base DN')
    # Typo fixed in user-facing text: "seperate" -> "separate".
    valid_groups = forms.CharField(
        max_length=1000, label='Valid LDAP group',
        help_text='Only members of the given group will be able to log in, use ":" to separate groups.',
        required=False)
    user_domain = forms.CharField(max_length=128, label='User Domain')
class DeviceResetForm(forms.Form):
    """Select which aspects of a device to wipe when resetting it."""

    reset_attributes = forms.BooleanField(label='Reset attributes',
                                          required=False, initial=True)
    # Device links are kept by default.
    reset_device_links = forms.BooleanField(label='Reset device links',
                                            required=False, initial=False)
    reset_passwords = forms.BooleanField(label='Reset passwords',
                                         required=False, initial=True)
    reset_subdevices = forms.BooleanField(label='Reset subdevices',
                                          required=False, initial=True)
class ConfigAddSelectTypeForm(forms.Form):
    """First step of adding config: choose the config type to create.

    Network auto-assignment is not offered when the parent node is one of
    the network/view tree style classes listed below.
    """

    # Parent node classes for which auto-assignment is not offered.
    _NO_AUTOASSIGN = ('view tree', 'ipv4 network', 'ipv6 network',
                      'network tree', 'ipv4 network range',
                      'ipv6 network range')

    def __init__(self, parent, *args, **kwargs):
        super(ConfigAddSelectTypeForm, self).__init__(*args, **kwargs)
        choices = []
        if parent.class_name not in self._NO_AUTOASSIGN:
            choices.append(('netautoassign', 'Network auto assignment'))
        choices.append(('value', 'Config value'))
        self.fields['config_type'] = forms.ChoiceField(label='Config type',
                                                       choices=choices)
class ConfigAddNetworkAutoassignForm(forms.Form):
    """Add a network auto-assignment config bound to a network tree range."""

    config_type = forms.CharField(initial='netautoassign',
                                  widget=forms.HiddenInput())

    def __init__(self, network_trees, *args, **kwargs):
        super(ConfigAddNetworkAutoassignForm, self).__init__(*args, **kwargs)
        # The tree flagged as default is listed first so it is preselected.
        nt_choices = []
        for tree in network_trees:
            value = (tree.oid, tree.attributes.get('name', '[UNKNOWN]'))
            if tree.attributes.get('default', False) is True:
                nt_choices.insert(0, value)
            else:
                nt_choices.append(value)
        self.fields['networktree'] = forms.ChoiceField(
            label='Network Tree', choices=nt_choices,
            help_text='Network tree for address.')
        # Stray trailing double quote removed from both help texts below.
        self.fields['range_start'] = forms.CharField(
            max_length=50, label='Range Start',
            help_text='Enter the start address of the range used for assignment')
        self.fields['range_end'] = forms.CharField(
            max_length=50, label='Range End',
            help_text='Enter the end address of the range used for assignment')


class ConfigAddValueForm(forms.Form):
    """Add a simple name/value config entry."""

    name = forms.CharField(max_length=50, label='Name')
    value = forms.CharField(max_length=50, label='Value', required=False)
    config_type = forms.CharField(initial='value',
                                  widget=forms.HiddenInput())
class PermissionAddForm(forms.Form):
    """Grant read/write permissions on a node to users and/or groups."""

    read_access = forms.BooleanField(label='Read access', required=False)
    write_access = forms.BooleanField(label='Write access', required=False)
    all_users = forms.BooleanField(label='Applies to all users',
                                   required=False)
    recursive = forms.BooleanField(
        label='Recursive', required=False,
        help_text='Applies recursively up the node tree.')

    def __init__(self, users, groups, *args, **kwargs):
        super(PermissionAddForm, self).__init__(*args, **kwargs)
        # User/group choices are only known at runtime.
        self.fields['users'] = forms.MultipleChoiceField(
            required=False, label='Users', choices=users,
            help_text='Included users.')
        self.fields['groups'] = forms.MultipleChoiceField(
            required=False, label='Groups', choices=groups,
            help_text='Included groups.')
class UserGroupAddForm(forms.Form):
    """Create a user group and pick its initial members."""

    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=50, label='Description',
                                  required=False)

    def __init__(self, users, *args, **kwargs):
        super(UserGroupAddForm, self).__init__(*args, **kwargs)
        # Membership choices are only known at runtime.
        self.fields['users'] = forms.MultipleChoiceField(
            required=False, label='Users', choices=users,
            help_text='Included users.')


class UserGroupUpdateForm(forms.Form):
    """Edit an existing user group's details and membership."""

    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=50, label='Description (optional)',
                                  required=False)

    def __init__(self, users, *args, **kwargs):
        super(UserGroupUpdateForm, self).__init__(*args, **kwargs)
        self.fields['users'] = forms.MultipleChoiceField(
            required=False, label='Users', choices=users,
            help_text='Included users.')
class CommandAddForm(forms.Form):
    """Add a free-text command."""
    freetext = forms.CharField(max_length=200, label='Command text')


class CommandUpdateForm(forms.Form):
    """Edit a command's text."""
    freetext = forms.CharField(max_length=200, label='Command text')


class CommandQueueAddForm(forms.Form):
    """Add a command queue."""
    name = forms.CharField(max_length=50, label='Name')


class CommandQueueUpdateForm(forms.Form):
    """Edit a command queue's name."""
    name = forms.CharField(max_length=50, label='Name')


class EventTriggerAddForm(forms.Form):
    """Add an event trigger."""
    name = forms.CharField(max_length=50, label='Name')


class EventTriggerUpdateForm(forms.Form):
    """Edit an event trigger's name."""
    name = forms.CharField(max_length=50, label='Name')


class EventTriggerRulePythonAddForm(forms.Form):
    """Add a python-code rule to an event trigger."""
    name = forms.CharField(max_length=50, label='Name')
    code = forms.CharField(max_length=5000, label='Code',
                           help_text='python code',
                           widget=forms.Textarea(attrs={'cols': '80', 'rows': '50'}))


class EventTriggerRulePythonUpdateForm(forms.Form):
    """Edit a python-code rule on an event trigger."""
    name = forms.CharField(max_length=50, label='Name')
    code = forms.CharField(max_length=5000, label='Code',
                           help_text='python code',
                           widget=forms.Textarea(attrs={'cols': '80', 'rows': '50'}))


class UsermanagerADSyncUsersForm(forms.Form):
    """Credentials used to sync users from Active Directory."""
    username = forms.CharField(max_length=50, label='Username')
    password = forms.CharField(max_length=32, label='Password',
                               widget=forms.PasswordInput(), required=True)
class PasswordCategoryAddForm(forms.Form):
    """Add a password category."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=100, required=False,
                                  label='Description')


class DeviceCopyForm(forms.Form):
    """Options controlling what is skipped when copying a device."""
    skip_attributes = forms.BooleanField(label='Skip attributes',
                                         required=False, initial=False)
    skip_devices = forms.BooleanField(label='Skip sub-devices',
                                      required=False, initial=False)
    # Networks are skipped by default.
    skip_networks = forms.BooleanField(label='Skip networks',
                                       required=False, initial=True)


class AttributeEditNotesForm(forms.Form):
    """Free-form notes editor rendered as a large textarea."""
    notes = forms.CharField(max_length=50000, label='', help_text='',
                            required=False,
                            widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))


class AttributeQuickeditForm(forms.Form):
    """Single-value quick edit for an attribute."""
    value = forms.CharField(max_length=100, required=False, label='Value')


class RackUnitOccupiedForm(forms.Form):
    """Mark a rack unit as occupied."""
    reason = forms.CharField(max_length=500, required=False, label='Reason',
                             help_text='Describe what is occupying this unit.')


class RackUnitReservedForm(forms.Form):
    """Mark a rack unit as reserved."""
    reason = forms.CharField(max_length=500, required=False, label='Reason',
                             help_text='Describe why this unit is reserved.')
class DeviceConfigAddForm(forms.Form):
    """Create a device config entry with version-retention settings."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=100, required=False,
                                  label='Description')
    max_versions = forms.IntegerField(
        label='Retained versions', min_value=0, initial=10,
        help_text='The number of config versions to retain, set to 0 for unlimited.')


class DeviceConfigSubmitForm(forms.Form):
    """Submit a new version of a device config."""
    data = forms.CharField(max_length=1000000, label='', help_text='',
                           required=True,
                           widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))


class DeviceConfigTemplateAddForm(forms.Form):
    """Create a device config template."""
    name = forms.CharField(max_length=50, label='Name')
    description = forms.CharField(max_length=100, required=False,
                                  label='Description')
    data = forms.CharField(max_length=1000000, label='', help_text='',
                           required=True,
                           widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))


class DeviceConfigTemplateSubmitForm(forms.Form):
    """Submit updated contents for a device config template."""
    data = forms.CharField(max_length=1000000, label='', help_text='',
                           required=True,
                           widget=forms.Textarea(attrs={'cols': '100', 'rows': '15'}))
| 43.981728 | 146 | 0.587321 | 52,345 | 0.988499 | 0 | 0 | 0 | 0 | 0 | 0 | 11,209 | 0.211674 |
ecfc0da0eb91cc8679b3822949206f2f4d0e30e7 | 1,117 | py | Python | azure_utility_tool/actions/list_all_users.py | alextricity25/azure_utility_tool | 2975b5f415e5c64335618e83ed0216b7923c4166 | [
"MIT"
] | 5 | 2020-01-02T03:12:14.000Z | 2020-08-19T02:31:19.000Z | azure_utility_tool/actions/list_all_users.py | alextricity25/azure_utility_tool | 2975b5f415e5c64335618e83ed0216b7923c4166 | [
"MIT"
] | null | null | null | azure_utility_tool/actions/list_all_users.py | alextricity25/azure_utility_tool | 2975b5f415e5c64335618e83ed0216b7923c4166 | [
"MIT"
] | 2 | 2020-03-16T00:19:06.000Z | 2020-08-20T19:31:10.000Z | """
Author(s):
Miguel Alex Cantu (miguel.can2@gmail.com)
Date: 02/21/2020
Description:
This action returns a dictionary of all users in the tenant, indexed
by userPrincipalName
"""
from azure_utility_tool.utils import paginate
from azure_utility_tool.graph_endpoints import USER_GET_ENDPOINT
from azure_utility_tool.test_cases import TestCases
from azure_utility_tool.transformers import expand_onPremisesExtensionAttributes
def list_all_users(parsed_args, config, app):
    """Return every user in the tenant as a dict keyed by userPrincipalName."""
    fetched = []
    # paginate() walks the Graph users endpoint and appends each page's
    # 'value' entries into ``fetched``.
    paginate(
        USER_GET_ENDPOINT,
        fetched,
        'value',
        parsed_args,
        config,
        app,
        test_data=TestCases().get_test_user_graph_data(),
        std_output=False,
        transformer=expand_onPremisesExtensionAttributes)
    return {user["userPrincipalName"]: user for user in fetched}
| 29.394737 | 80 | 0.690242 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 403 | 0.360788 |
ecfc31482311ffe936e2fc1bff49cd4a0de2b2cb | 2,824 | py | Python | examples/example.py | jakevdp/Mmani | 681b6cdbd358b207e8b6c4a482262c84bea15bd7 | [
"BSD-2-Clause"
] | 303 | 2016-03-03T00:44:37.000Z | 2022-03-14T03:43:38.000Z | examples/example.py | jakevdp/Mmani | 681b6cdbd358b207e8b6c4a482262c84bea15bd7 | [
"BSD-2-Clause"
] | 52 | 2016-02-26T21:41:31.000Z | 2021-06-27T08:33:51.000Z | examples/example.py | jakevdp/Mmani | 681b6cdbd358b207e8b6c4a482262c84bea15bd7 | [
"BSD-2-Clause"
] | 67 | 2016-03-03T22:38:35.000Z | 2022-01-12T08:03:47.000Z | import sys
import numpy as np
import scipy as sp
import scipy.sparse as sparse
from megaman.geometry import Geometry
from sklearn import datasets
from megaman.embedding import (Isomap, LocallyLinearEmbedding,
                               LTSA, SpectralEmbedding)

# Generate an example data set.
N = 10
# FIX: ``sklearn.datasets.samples_generator`` was deprecated in 0.22 and
# removed in 0.24; ``make_s_curve`` is exposed directly on sklearn.datasets.
X, color = datasets.make_s_curve(N, random_state=0)

# Geometry is the main class that caches distance, affinity and laplacian
# matrices.  It is configured with the parameters & methods for its three
# components:
#   Adjacency: NxN (sparse) pairwise matrix indicating neighborhood regions
#   Affinity:  NxN (sparse) pairwise matrix indicating similarity between points
#   Laplacian: NxN (sparse) pairwise matrix containing geometric manifold info
radius = 5
adjacency_method = 'cyflann'
adjacency_kwds = {'radius': radius}  # ignore distances above this radius
affinity_method = 'gaussian'
affinity_kwds = {'radius': radius}  # A = exp(-||x - y||/radius^2)
laplacian_method = 'geometric'
laplacian_kwds = {'scaling_epps': radius}  # scaling ensures convergence to Laplace-Beltrami operator

geom = Geometry(adjacency_method=adjacency_method, adjacency_kwds=adjacency_kwds,
                affinity_method=affinity_method, affinity_kwds=affinity_kwds,
                laplacian_method=laplacian_method, laplacian_kwds=laplacian_kwds)

# Hand the data to the Geometry object (set_adjacency_matrix /
# set_affinity_matrix exist for precomputed inputs).
geom.set_data_matrix(X)

# Matrices can be fetched with e.g. Geometry.compute_adjacency_matrix();
# by default this is pass-by-reference, use copy=True for a copy.
adjacency_matrix = geom.compute_adjacency_matrix()

# Instead of a precomputed Geometry, the embedding classes also accept a
# dictionary of geometry arguments.
geom = {'adjacency_method': adjacency_method, 'adjacency_kwds': adjacency_kwds,
        'affinity_method': affinity_method, 'affinity_kwds': affinity_kwds,
        'laplacian_method': laplacian_method, 'laplacian_kwds': laplacian_kwds}

# Create each embedding into 2 dimensions.
n_components = 2

# LTSA
ltsa = LTSA(n_components=n_components, eigen_solver='arpack', geom=geom)
embed_ltsa = ltsa.fit_transform(X)

# LLE
lle = LocallyLinearEmbedding(n_components=n_components, eigen_solver='arpack',
                             geom=geom)
embed_lle = lle.fit_transform(X)

# Isomap
isomap = Isomap(n_components=n_components, eigen_solver='arpack', geom=geom)
embed_isomap = isomap.fit_transform(X)

# Spectral Embedding
spectral = SpectralEmbedding(n_components=n_components, eigen_solver='arpack',
                             geom=geom)
embed_spectral = spectral.fit_transform(X)
ecfcea4ccf7a5523cf5c703502dd1432cbaa0c9e | 3,545 | py | Python | oncopolicy/models/deterministic_progression.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | 6 | 2022-01-15T11:57:19.000Z | 2022-02-13T21:15:22.000Z | oncopolicy/models/deterministic_progression.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | null | null | null | oncopolicy/models/deterministic_progression.py | yala/Tempo | bf3e0e78d64869bb2079c582a4a35982f78386ad | [
"MIT"
] | 2 | 2022-02-02T13:09:29.000Z | 2022-02-18T07:06:19.000Z | import torch
import torch.nn as nn
from oncopolicy.models.factory import RegisterModel
import pdb
class AbstractDeterministicGuideline(nn.Module):
    """Base class for deterministic risk-progression baselines.

    Stores ``args`` and ``args.max_steps`` and provides helpers to turn a
    probability tensor into per-class (log-)probabilities.
    """

    def __init__(self, args):
        super(AbstractDeterministicGuideline, self).__init__()
        self.args = args
        self.max_steps = args.max_steps

    def get_logprob(self, z):
        # Stack P(neg)=1-z and P(pos)=z along dim 1, then take the log.
        expanded = z.unsqueeze(1)
        return torch.log(torch.cat([1 - expanded, expanded], dim=1))

    def get_prob(self, z):
        # Predictions are already probabilities; identity.
        return z
@RegisterModel("last_observed_risk")
class LastObservedRisk(AbstractDeterministicGuideline):
    '''
    Deterministic risk progression: carry the most recently observed
    risk forward unchanged until the next observation.
    '''

    def __init__(self, args):
        super(LastObservedRisk, self).__init__(args)
        # Max-pooling over the step axis picks out the largest (i.e. most
        # recent) observed step index within each window.
        self.max_pool = nn.MaxPool1d(kernel_size=self.max_steps, stride=1)

    def forward(self, x, batch):
        '''
        args:
            - x: [B, max_steps, risk_dim] tensor, zeros where unobserved
            - batch: batch dict carrying the observation mask under
              'observed' (or 'progression_observed')
        returns:
            - z: [B, max_steps, risk_dim] filled from the last observed step
            - None (no auxiliary output)
        '''
        B, _, D = x.size()
        observed_key = 'observed' if 'observed' in batch else 'progression_observed'
        observed = batch[observed_key]  # [B, max_steps] mask
        step_ids = torch.arange(start=0, end=self.max_steps).unsqueeze(0) \
            .expand([B, self.max_steps]).to(self.args.device)
        # Step index where observed, zero elsewhere.
        observed_steps = (observed.float() * step_ids.float()).unsqueeze(1)
        # Pad with a block of zeros and drop the last entry so that the
        # pooling window at step t only covers observations up to step t-1.
        # NOTE(review): an observation at step 0 contributes index 0, which
        # is indistinguishable from "unobserved" — verify that is intended.
        padded = torch.cat(
            [torch.zeros([B, 1, self.max_steps]).to(self.args.device),
             observed_steps[:, :, :-1]], dim=-1)
        last_seen = self.max_pool(padded).long().transpose(1, 2) \
            .expand(B, self.max_steps, D)
        z = torch.gather(x, dim=1, index=last_seen)
        return z, None
@RegisterModel("static_risk")
class StaticRisk(AbstractDeterministicGuideline):
    '''
    Deterministic risk progression baseline: assume risk never changes
    from the first observation.
    '''

    def __init__(self, args):
        super(StaticRisk, self).__init__(args)

    def forward(self, x, batch):
        '''
        args:
            - x: [B, max_steps, risk_dim] tensor, zeros where unobserved
            - batch: batch dict (not used by this baseline)
        returns:
            - z: [B, max_steps, risk_dim], every step equal to step 0's risk
            - None (no auxiliary output)
        '''
        first_step = x[:, 0, :].unsqueeze(1)
        z = first_step.expand_as(x).contiguous()
        return z, None
@RegisterModel("random")
class Random(AbstractDeterministicGuideline):
    '''
    Baseline that predicts random risk at every time step.
    '''

    def __init__(self, args):
        super(Random, self).__init__(args)

    def forward(self, x, batch):
        '''
        args:
            - x: [B, max_steps, risk_dim] tensor (only shape/device used)
            - batch: batch dict (not used by this baseline)
        returns:
            - z: [B, max_steps, risk_dim] of sigmoid-squashed gaussian noise
            - None (no auxiliary output)
        '''
        noise = torch.randn_like(x).to(x.device)
        z = torch.sigmoid(noise)
        return z, None
| 37.315789 | 129 | 0.640621 | 3,343 | 0.943018 | 0 | 0 | 3,072 | 0.866573 | 0 | 0 | 1,616 | 0.455853 |
ecfeefcda553063068069aa70f84508ced468409 | 12,003 | py | Python | config/settings/base.py | sul-cidr/histonets-arch | 6105de90905d54db604b0606d517f53782aae16d | [
"MIT"
] | null | null | null | config/settings/base.py | sul-cidr/histonets-arch | 6105de90905d54db604b0606d517f53782aae16d | [
"MIT"
] | 19 | 2018-04-19T19:32:59.000Z | 2018-06-04T23:20:03.000Z | config/settings/base.py | sul-cidr/histonets-arch | 6105de90905d54db604b0606d517f53782aae16d | [
"MIT"
] | null | null | null | """
Base settings to build other settings files upon.
"""
import environ

# (histonets/config/settings/base.py - 3 = histonets/)
ROOT_DIR = environ.Path(__file__) - 3
APPS_DIR = ROOT_DIR.path('histonets')

env = environ.Env()

READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False)
if READ_DOT_ENV_FILE:
    # OS environment variables take precedence over variables from .env
    env.read_env(str(ROOT_DIR.path('.env')))

# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool('DJANGO_DEBUG', False)
# Local time zone.  Choices:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# (not all may be available on every OS; on Windows this must be set to
# the system time zone).
TIME_ZONE = 'UTC'
# https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True

# DATABASES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
    'default': env.db('DATABASE_URL'),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True

# URLS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = 'config.urls'
# https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# APPS
# ------------------------------------------------------------------------------
DJANGO_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # 'django.contrib.humanize', # Handy template tags
    'django.contrib.admin',
]
THIRD_PARTY_APPS = [
    'crispy_forms',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'allauth.socialaccount.providers.box',
    'webpack_loader',
    'health_check',
    'health_check.db',
    # 'health_check.cache',
    # 'health_check.storage',
    # 'health_check.contrib.celery', # requires celery
    # 'health_check.contrib.psutil', # disk and memory utilization; requires psutil
    # 'health_check.contrib.s3boto_storage', # requires boto and S3BotoStorage backend
]
LOCAL_APPS = [
    'histonets.users.apps.UsersConfig',
    'histonets.apps.HistonetsConfig',
    'histonets.collections.apps.CollectionsConfig',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS

# MIGRATIONS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules
MIGRATION_MODULES = {
    'sites': 'histonets.contrib.sites.migrations'
}
# AUTHENTICATION
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends
AUTHENTICATION_BACKENDS = [
    'django.contrib.auth.backends.ModelBackend',
    'allauth.account.auth_backends.AuthenticationBackend',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'

# PASSWORDS
# ------------------------------------------------------------------------------
# Argon2 first:
# https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django
PASSWORD_HASHERS = [
    'django.contrib.auth.hashers.Argon2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
]
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator'},
    {'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator'},
    {'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator'},
    {'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator'},
]
# MIDDLEWARE
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#middleware
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

# STATIC
# ------------------------------------------------------------------------------
STATIC_ROOT = str(ROOT_DIR('static'))
STATIC_URL = '/static/'
STATICFILES_DIRS = [
    str(APPS_DIR.path('static')),
    str(ROOT_DIR('assets'))
]
STATICFILES_FINDERS = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]

# django-webpack-loader: where the webpack bundle manifest lives.
WEBPACK_LOADER = {
    'DEFAULT': {
        'BUNDLE_DIR_NAME': 'bundles/',
        'STATS_FILE': str(ROOT_DIR('webpack-stats.json')),
    }
}

# MEDIA
# ------------------------------------------------------------------------------
MEDIA_ROOT = str(ROOT_DIR('media'))
MEDIA_URL = '/media/'

# IIIF
# ------------------------------------------------------------------------------
IIIF_DIR = 'iiif'  # Relative to the default storage, e.g., /media/iiif
IIIF_CANONICAL_URI_PATTERN = "{}/iiif/2/{{}}/full/max/0/default.jpg"
IIIF_CANONICAL_URI = IIIF_CANONICAL_URI_PATTERN.format(
    env('CANTALOUPE_SERVER', default='http://localhost'))
IIIF_CANONICAL_CONTAINER_URI = None
if env('CANTALOUPE_CONTAINER_SERVER', default=False):
    IIIF_CANONICAL_CONTAINER_URI = IIIF_CANONICAL_URI_PATTERN.format(
        env('CANTALOUPE_CONTAINER_SERVER', default='http://localhost')
    )
IIIF_IMAGE_FORMATS = ["jpg", "jpeg", "tif", "tiff", "gif", "png"]
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            str(APPS_DIR.path('templates')),
        ],
        'OPTIONS': {
            'debug': DEBUG,
            # Explicit loaders; see
            # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
            'loaders': [
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            ],
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

# http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap4'

# FIXTURES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs
FIXTURE_DIRS = (
    str(APPS_DIR.path('fixtures')),
)
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
                    default='django.core.mail.backends.smtp.EmailBackend')

# ADMIN
# ------------------------------------------------------------------------------
# Django Admin URL regex.
ADMIN_URL = r'^admin/'
# https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = [
    ("""Center for Interdisciplinary Digital Research (CIDR)""",
     'contact-cidr@stanford.edu'),
]
MANAGERS = ADMINS

# Celery
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['histonets.taskapp.celery.CeleryConfig']
CELERY_BROKER_URL = env('CELERY_BROKER_URL', default='django://')
# The django broker cannot store results; fall back to redis in that case.
if CELERY_BROKER_URL == 'django://':
    CELERY_RESULT_BACKEND = 'redis://'
else:
    CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ALWAYS_EAGER = False  # set to True for emulation
CELERYD_TASK_TERMINATES_WORKER = True  # custom option
CELERYD_MAX_TASKS_PER_CHILD = 1

# django-allauth
# ------------------------------------------------------------------------------
# https://django-allauth.readthedocs.io/en/latest/configuration.html
ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True)
ACCOUNT_AUTHENTICATION_METHOD = 'username_email'
ACCOUNT_EMAIL_REQUIRED = env.bool('DJANGO_ACCOUNT_EMAIL_REQUIRED', default=True)
# NOTE(review): env.bool with a string default looks off — env.str with
# default="mandatory" seems intended; verify before changing.
ACCOUNT_EMAIL_VERIFICATION = env.bool('DJANGO_ACCOUNT_EMAIL_VERIFICATION', default="mandatory")
ACCOUNT_ADAPTER = 'histonets.users.adapters.AccountAdapter'
SOCIALACCOUNT_ADAPTER = 'histonets.users.adapters.SocialAccountAdapter'

# django-compressor
# ------------------------------------------------------------------------------
# https://django-compressor.readthedocs.io/en/latest/quickstart/#installation
INSTALLED_APPS += ['compressor']
STATICFILES_FINDERS += ['compressor.finders.CompressorFinder']
COMPRESS_PRECOMPILERS = (
    ('text/x-sass', 'sass {infile} {outfile}'),
)

# Histonets
# ------------------------------------------------------------------------------
| 40.550676 | 108 | 0.637591 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9,003 | 0.750062 |
ecffd858ad8cd1dedc6a42ad347e664e218138b6 | 10,706 | py | Python | groupman.py | yteraoka/googleapps-directory-tools | ea59f0602ddafcc850cf7e36f6021be0ee60c2a3 | [
"Apache-2.0"
] | 20 | 2015-02-20T04:58:17.000Z | 2020-12-30T23:43:29.000Z | groupman.py | yteraoka/googleapps-directory-tools | ea59f0602ddafcc850cf7e36f6021be0ee60c2a3 | [
"Apache-2.0"
] | 2 | 2015-03-02T14:33:25.000Z | 2017-09-20T11:23:19.000Z | groupman.py | yteraoka/googleapps-directory-tools | ea59f0602ddafcc850cf7e36f6021be0ee60c2a3 | [
"Apache-2.0"
] | 11 | 2015-03-02T14:16:01.000Z | 2021-10-03T14:28:10.000Z | #!/apps/python-2.7/bin/python
# -*- coding: utf-8 -*-
import os
import os.path
import glob
import sys
from apiclient.discovery import build
from apiclient.errors import HttpError
import httplib2
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client import tools
import argparse
import simplejson as json
import pprint
import codecs
import yaml
import re
from termcolor import colored
from const import *
from utils import *
# Group attributes recognised by this tool.  Names match the Admin SDK
# field names (the first three come from the Directory API group resource,
# the rest from the Groups Settings API resource).
GROUP_PARAMS = [
    'name', 'description', 'aliases',
    'allowExternalMembers', 'allowGoogleCommunication', 'allowWebPosting',
    'archiveOnly', 'customReplyTo', 'includeInGlobalAddressList',
    'isArchived', 'maxMessageBytes', 'membersCanPostAsTheGroup',
    'messageDisplayFont', 'messageModerationLevel', 'primaryLanguage',
    'replyTo', 'sendMessageDenyNotification', 'showInGroupDirectory',
    'spamModerationLevel', 'whoCanContactOwner', 'whoCanInvite',
    'whoCanJoin', 'whoCanLeaveGroup', 'whoCanPostMessage',
    'whoCanViewGroup', 'whoCanViewMembership',
]
class GaService(object):
    """Authorized access to the Google Admin SDK services this tool uses.

    Builds Directory API (groups, members) and Groups Settings API clients
    from OAuth2 credentials stored on disk.
    """

    def __init__(self, cred_path=CREDENTIALS_PATH):
        storage = Storage(cred_path)
        credentials = storage.get()
        # Without valid stored credentials nothing can be done; exit the
        # CLI with a failure status.
        if credentials is None or credentials.invalid:
            sys.exit(1)
        http = httplib2.Http()
        http = credentials.authorize(http)
        sv1 = build('admin', 'directory_v1', http=http)
        sv2 = build('groupssettings', 'v1', http=http)
        self.service = {}
        self.service['group'] = sv1.groups()
        self.service['member'] = sv1.members()
        self.service['settings'] = sv2.groups()

    def group_sv(self):
        """Return the Directory API groups service."""
        return self.service['group']

    def member_sv(self):
        """Return the Directory API members service."""
        return self.service['member']

    def settings_sv(self):
        """Return the Groups Settings API service."""
        return self.service['settings']

    def list_local_groups(self, domain, dir):
        """Return a GaGroup for each <email>@<domain>.yml file in dir."""
        groups = []
        for f in glob.glob("%s/*@%s.yml" % (dir, domain)):
            email = os.path.splitext(os.path.basename(f))[0]
            group_obj = GaGroup()
            group_obj.set_group_key(email)
            groups.append(group_obj)
        return groups

    def list_cloud_groups(self, domain):
        """Return a GaGroup for each group in the domain (all pages)."""
        groups = []
        pageToken = None
        while True:
            params = {'domain': domain}
            if pageToken:
                params['pageToken'] = pageToken
            r = self.service['group'].list(**params).execute()
            # FIX: dict.has_key() is Python-2 only; 'in' works on 2 and 3.
            if 'groups' in r:
                for group in r['groups']:
                    group_obj = GaGroup()
                    group_obj.set_group_key(group['email'])
                    groups.append(group_obj)
            if 'nextPageToken' in r:
                pageToken = r['nextPageToken']
            else:
                break
        return groups
class GaGroup(object):
def __init__(self):
self.local_dir = '.'
self.local = {}
self.cloud = {}
self.group_key = None
def set_group_key(self, group_key):
self.group_key = group_key
def set_local_dir(self, local_dir):
self.local_dir = local_dir
def group_key(self):
return self.group_key
def load_cloud(self, sv):
r = sv.settings_sv().get(groupUniqueId=self.group_key).execute()
self.cloud = r
members = self.load_cloud_member(sv)
self.cloud['members'] = []
self.cloud['owners'] = []
self.cloud['managers'] = []
for member in members:
if member['role'] == 'MEMBER':
self.cloud['members'].append(member['email'])
elif member['role'] == 'MANAGER':
self.cloud['managers'].append(member['email'])
elif member['role'] == 'OWNER':
self.cloud['owners'].append(member['email'])
self.cloud['members'].sort()
self.cloud['owners'].sort()
self.cloud['managers'].sort()
r = sv.group_sv().get(groupKey=self.group_key).execute()
if r.has_key('aliases'):
self.cloud['aliases'] = r['aliases']
def load_cloud_member(self, sv):
members = []
pageToken = None
while True:
params = { 'groupKey': self.group_key }
if pageToken:
params['pageToken'] = pageToken
r = sv.member_sv().list(**params).execute()
if r.has_key('members'):
for member in r['members']:
members.append(member)
if r.has_key('nextPageToken'):
pageToken = r['nextPageToken']
else:
break
return members
def dump_data(self, data, stream):
stream.write("email: %s\n" % data['email'])
for key in GROUP_PARAMS:
if data.has_key(key):
if key in ['name', 'description']:
stream.write("%s: \"%s\"\n" % (key, re.sub(r'"', '\\"', data[key]).encode('utf-8')))
elif key in ['maxMessageBytes']:
stream.write("%s: %s\n" % (key, data[key]))
elif key in ['aliases']:
if len(data[key]):
stream.write("%s:\n" % key)
for val in data[key]:
stream.write(" - %s\n" % val)
else:
stream.write("%s: []\n" % key)
else:
stream.write("%s: \"%s\"\n" % (key, data[key]))
if len(data['members']):
stream.write("members:\n")
for member in data['members']:
stream.write(" - %s\n" % member)
else:
stream.write("members: []\n")
if len(data['managers']):
stream.write("managers:\n")
for member in data['managers']:
stream.write(" - %s\n" % member)
else:
stream.write("managers: []\n")
if len(data['owners']):
stream.write("owners:\n")
for member in data['owners']:
stream.write(" - %s\n" % member)
else:
stream.write("owners: []\n")
def dump_cloud(self):
self.dump_data(self.cloud, sys.stdout)
def local_file(self):
file = "%s/%s.yml" % (self.local_dir, self.group_key)
return file
def export(self):
f = open(self.local_file(), 'w')
self.dump_data(self.cloud, f)
f.close()
def load_local(self):
file = self.local_file()
if os.path.exists(file):
self.local = yaml.load(open(file).read().decode('utf-8'))
def diff(self):
if not self.local.has_key('name'):
self.load_local()
for key in GROUP_PARAMS:
if self.local.has_key(key) and self.cloud.has_key(key):
if self.local[key] != self.cloud[key]:
print colored("-%s: %s (cloud)" % (key, self.cloud[key]), 'red')
print colored("+%s: %s (local)" % (key, self.local[key]), 'green')
elif self.local.has_key(key):
print colored("+%s: %s (local)" % (key, self.local[key]), 'green')
elif self.cloud.has_key(key):
print colored("-%s: %s (cloud)" % (key, self.cloud[key]), 'red')
for key in ['members', 'managers', 'owners']:
only_cloud = [x for x in self.cloud[key] if x not in self.local[key]]
only_local = [x for x in self.local[key] if x not in self.cloud[key]]
if len(only_cloud) or len(only_local):
print "%s:" % key
for x in only_cloud:
print colored("- - %s (cloud)" % x, 'red')
for x in only_local:
print colored("+ - %s (local)" % x, 'green')
def apply(self, sv):
if not self.local.has_key('name'):
self.load_local()
body = {}
update_keys = []
for key in GROUP_PARAMS:
if key not in ['name', 'description', 'aliases']:
if self.cloud[key] != self.local[key]:
body[key] = self.local[key]
if len(body) > 0:
r = sv.settings_sv().update(groupUniqueId=self.group_key, body=body).execute()
print "updated"
else:
print "no changes"
def csv(self):
if not self.local.has_key('name'):
self.load_local()
description = re.sub(r'\s*\[sateraito.*$', '', self.local['description'])
return '"IU","%s","%s","%s","%s","%s"' % (self.local['email'],
self.local['name'],
','.join(self.local['members']),
','.join(self.local['owners']),
re.sub(r'"', '""', description))
def csv_header():
return '"command","email","name","members","owners","comment"'
def main():
    # Command-line front end: parse the operation and targets, then apply
    # the requested operation to each resolved group.
    parser = argparse.ArgumentParser()
    parser.add_argument('operation',
                        choices=['show', 'diff', 'export', 'apply', 'csv'],
                        help='operationo')
    parser.add_argument('targets', nargs='+', help='domain or email list')
    parser.add_argument('--dir', help='local data directory', default='.')
    parser.add_argument('--encoding',
                        choices=['utf-8', 'sjis'],
                        help='csv output encoding',
                        default='utf-8')
    args = parser.parse_args()
    sv = GaService()
    groups = []
    for target in args.targets:
        # A target containing '@' is a single group email; anything else is
        # treated as a whole domain to enumerate.
        if target.find('@') >= 0:
            g = GaGroup()
            g.set_group_key(target)
            groups.append(g)
        else:
            # csv reads from local YAML files; other operations enumerate
            # the groups that exist in the cloud.
            if args.operation == 'csv':
                groups.extend(sv.list_local_groups(target, args.dir))
            else:
                groups.extend(sv.list_cloud_groups(target))
    if args.operation == 'csv':
        # NOTE(review): csv_header is defined inside GaGroup, so this bare
        # call raises NameError at runtime; GaGroup.csv_header (made a
        # staticmethod) appears to be what was intended — confirm.
        print csv_header()
    for group in groups:
        group.set_local_dir(args.dir)
        if args.operation != 'csv':
            print group.group_key
            # Cloud state is only needed for non-csv operations.
            group.load_cloud(sv)
        if args.operation == 'show':
            group.dump_cloud()
        elif args.operation == 'export':
            group.export()
        elif args.operation == 'diff':
            group.diff()
        elif args.operation == 'apply':
            group.apply(sv)
        elif args.operation == 'csv':
            print group.csv().encode(args.encoding)
| 35.217105 | 104 | 0.519055 | 7,825 | 0.730899 | 0 | 0 | 0 | 0 | 0 | 0 | 1,914 | 0.178778 |
a60183c4a0d01fe278e2d3f101040935e10e0734 | 3,716 | py | Python | icekit/utils/search/search_indexes.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 52 | 2016-09-13T03:50:58.000Z | 2022-02-23T16:25:08.000Z | icekit/utils/search/search_indexes.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 304 | 2016-08-11T14:17:30.000Z | 2020-07-22T13:35:18.000Z | icekit/utils/search/search_indexes.py | ic-labs/django-icekit | c507ea5b1864303732c53ad7c5800571fca5fa94 | [
"MIT"
] | 12 | 2016-09-21T18:46:35.000Z | 2021-02-15T19:37:50.000Z | from django.utils.text import capfirst
from easy_thumbnails.exceptions import InvalidImageFormatError
from easy_thumbnails.files import get_thumbnailer
from haystack import indexes
from haystack.utils import get_model_ct
# Doesn't extend `indexes.Indexable` to avoid auto-detection for 'Search In'
class AbstractLayoutIndex(indexes.SearchIndex):
    """
    A search index for a publishable polymorphic model that implements
    ListableMixin and LayoutFieldMixin.

    Subclasses will need to mix in `indexes.Indexable` and implement
    `get_model(self)`. They may need to override the `text` field to specify
    a different template name.

    Derived classes must override the `get_model()` method to return the
    specific class (not an instance) that the search index will use.

    NOTE(review): this module uses Python 2-only constructs (`unicode`,
    `dict.has_key`); it will need porting before running under Python 3.
    """
    # Content
    text = indexes.CharField(document=True, use_template=True, template_name="search/indexes/icekit/default.txt")
    get_type = indexes.CharField()
    get_title = indexes.CharField(model_attr='get_title', boost=2.0)
    get_oneliner = indexes.CharField(model_attr='get_oneliner')
    boosted_search_terms = indexes.CharField(model_attr="get_boosted_search_terms", boost=2.0, null=True)
    # Meta
    get_absolute_url = indexes.CharField(model_attr='get_absolute_url')
    get_list_image_url = indexes.CharField()
    modification_date = indexes.DateTimeField()
    language_code = indexes.CharField()
    # SEO Translations
    meta_keywords = indexes.CharField()
    meta_description = indexes.CharField()
    meta_title = indexes.CharField()
    # We add this for autocomplete.
    content_auto = indexes.EdgeNgramField(model_attr='get_title')
    # facets
    # top-level result type
    search_types = indexes.MultiValueField(faceted=True)

    def index_queryset(self, using=None):
        """
        Index published objects.
        """
        return self.get_model().objects.published().select_related()

    def full_prepare(self, obj):
        """
        Make django_ct equal to the type of get_model, to make polymorphic
        children show up in results.
        """
        prepared_data = super(AbstractLayoutIndex, self).full_prepare(obj)
        prepared_data['django_ct'] = get_model_ct(self.get_model())
        return prepared_data

    def prepare_get_type(self, obj):
        # unicode() is Python 2 only; the empty-string fallback covers
        # objects without a get_type method.
        if hasattr(obj, 'get_type'):
            return unicode(obj.get_type())
        return ""

    def prepare_get_list_image_url(self, obj):
        # Objects without a get_list_image method fall through to "".
        list_image = getattr(obj, "get_list_image", lambda x: None)()
        if list_image:
            # resize according to the `list_image` alias
            try:
                return get_thumbnailer(list_image)['list_image'].url
            except InvalidImageFormatError:
                # Unthumbnailable images are silently indexed without a URL.
                pass
        return ""

    def prepare_modification_date(self, obj):
        return getattr(obj, "modification_date", None)

    def prepare_language_code(self, obj):
        return getattr(obj, "language_code", None)

    def prepare_meta_keywords(self, obj):
        return getattr(obj, "meta_keywords", None)

    def prepare_meta_description(self, obj):
        return getattr(obj, "meta_description", None)

    def prepare_meta_title(self, obj):
        return getattr(obj, "meta_title", None)

    def prepare_search_types(self, obj):
        # Facet values: always the plural type name; additionally
        # 'Education' for educational content.
        r = [capfirst(obj.get_type_plural())]
        if hasattr(obj, 'is_educational') and obj.is_educational():
            r.append('Education')
        return r

    def prepare(self, obj):
        data = super(AbstractLayoutIndex, self).prepare(obj)
        # ensure default boost amount for field_value_factor calculations.
        # NOTE(review): has_key() is Python 2 only ('boost' not in data).
        if not data.has_key('boost'):
            data['boost'] = 1.0
        return data
| 35.730769 | 113 | 0.689451 | 3,415 | 0.918999 | 0 | 0 | 0 | 0 | 0 | 0 | 1,188 | 0.319699 |
a60246e782926af300cad036aa0701ca98dcd72d | 1,655 | py | Python | CSC-291/Projects/bsearch_timer.py | FrancesCoronel/cs-hu | ecd103a525fd312146d3b6c69ee7c1452548c5e2 | [
"MIT"
] | 2 | 2016-12-05T06:15:34.000Z | 2016-12-15T10:56:50.000Z | CSC-291/Projects/bsearch_timer.py | fvcproductions/CS-HU | ecd103a525fd312146d3b6c69ee7c1452548c5e2 | [
"MIT"
] | null | null | null | CSC-291/Projects/bsearch_timer.py | fvcproductions/CS-HU | ecd103a525fd312146d3b6c69ee7c1452548c5e2 | [
"MIT"
] | 3 | 2019-04-06T01:45:54.000Z | 2020-04-24T16:55:32.000Z | '''
FVCproductions
September 18, 2014
Python
CSC291_Project3
'''
# Binary Search: (20 pts)
# Implement binary search, submit it to the course website. You must write a
# function bsearch that takes and list and an element to search for. This
# function should return the index of the element if found and -1 otherwise.
#
# Note: Pay attention to the order of the paremeters specified above.
#
# Challenge (not mandatory): Learn about recursion and also write a function bsearchRecursive that
# uses recursion to find the index of the value.
from datetime import datetime
# timer - starting time defined
start_time = datetime.now()
sortedList = [1,2,3,4,5] #sorted list
target = 3 #element to search for
print "sorted list: " + str(sortedList)
print "\nelement to search for: " + str(target)
# challenge bit
def bsearch(blist, element):
    """Iterative binary search over a sorted list.

    blist: a list sorted in ascending order
    element: the value to locate

    Returns the index of element in blist, or -1 when not found.
    """
    low = 0
    high = len(blist) - 1
    while low <= high:
        # Floor division keeps mid an int under Python 3 (plain '/' would
        # produce a float and crash on blist[mid]); identical under Python 2.
        mid = (low + high) // 2
        probe = blist[mid]
        if element == probe:
            return mid
        elif element < probe:
            high = mid - 1
        else:
            low = mid + 1
    return -1
recursive_output = bsearch(sortedList, target)
print "\nbsearch recursive - index of element: " + str(recursive_output)
finish_time = datetime.now()
difference_time = finish_time - start_time
micro = difference_time.microseconds
mili = (difference_time.microseconds)/(1000)
print "\ntime it took to complete program in microseconds: " + str(micro)
print "\ntime it took to complete program in miliseconds: " + str(mili) | 22.671233 | 100 | 0.650151 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 842 | 0.508761 |
a6037da7a2fe3008ebf40e96199ded29a7213caa | 15,599 | py | Python | src/stages.py | khalidm/vcf_annotation_pipeline | 4939d1a92b87f29bacfa1521de63dd8d4283acdc | [
"BSD-3-Clause"
] | null | null | null | src/stages.py | khalidm/vcf_annotation_pipeline | 4939d1a92b87f29bacfa1521de63dd8d4283acdc | [
"BSD-3-Clause"
] | null | null | null | src/stages.py | khalidm/vcf_annotation_pipeline | 4939d1a92b87f29bacfa1521de63dd8d4283acdc | [
"BSD-3-Clause"
] | null | null | null | '''
Individual stages of the pipeline implemented as functions from
input files to output files.
The run_stage function knows everything about submitting jobs and, given
the state parameter, has full access to the state of the pipeline, such
as config, options, DRMAA and the logger.
'''
from utils import safe_make_dir
from runner import run_stage
import os
PICARD_JAR = '$PICARD_HOME/lib/picard.jar'
GATK_JAR = '$GATK_HOME/GenomeAnalysisTK.jar'
def java_command(jar_path, mem_in_gb, command_args):
'''Build a string for running a java command'''
# Bit of room between Java's max heap memory and what was requested.
# Allows for other Java memory usage, such as stack.
java_mem = mem_in_gb - 2
return 'java -Xmx{mem}g -jar {jar_path} {command_args}'.format(
jar_path=jar_path, mem=java_mem, command_args=command_args)
def run_java(state, stage, jar_path, mem, args):
command = java_command(jar_path, mem, args)
run_stage(state, stage, command)
class Stages(object):
def __init__(self, state):
self.state = state
self.reference = self.get_options('ref_grch37')
self.dbsnp_grch37 = self.get_options('dbsnp_grch37')
self.mills_grch37 = self.get_options('mills_grch37')
self.one_k_g_snps = self.get_options('one_k_g_snps')
self.one_k_g_indels = self.get_options('one_k_g_indels')
self.one_k_g_highconf_snps = self.get_options('one_k_g_highconf_snps')
self.hapmap = self.get_options('hapmap')
self.interval_grch37 = self.get_options('interval_grch37')
self.CEU_mergeGvcf = self.get_options('CEU_mergeGvcf')
self.GBR_mergeGvcf = self.get_options('GBR_mergeGvcf')
self.FIN_mergeGvcf = self.get_options('FIN_mergeGvcf')
self.SNPEFFJAR = self.get_options('SNPEFFJAR')
self.SNPEFFCONF = self.get_options('SNPEFFCONF')
def run_picard(self, stage, args):
mem = int(self.state.config.get_stage_options(stage, 'mem'))
return run_java(self.state, stage, PICARD_JAR, mem, args)
def run_gatk(self, stage, args):
mem = int(self.state.config.get_stage_options(stage, 'mem'))
return run_java(self.state, stage, GATK_JAR, mem, args)
def get_stage_options(self, stage, *options):
return self.state.config.get_stage_options(stage, *options)
def get_options(self, *options):
return self.state.config.get_options(*options)
def original_vcf(self, output):
'''Original vcf file'''
pass
def decompose_vcf(self, inputs, vcf_out, sample_id):
'''Decompose and normalize the input raw VCF file using Vt'''
vcf_in = inputs
cores = self.get_stage_options('decompose_vcf', 'cores')
command = 'cat {vcf_in} | vt decompose -s - |' \
'vt normalize -r {reference} - > {vcf_out}'
.format(vcf_in=vcf_in,
reference=self.reference,
vcf=vcf_out)
run_stage(self.state, 'decompose_vcf', command)
def annotate_vep(self, vcf_in, vcf_out):
'''Annotate VCF file using VEP'''
cores = self.get_stage_options('annotate_vep', 'cores')
command = 'perl $VEPPATH/variant_effect_predictor.pl ' \
'--cache -i {vcf_in} --format vcf -o {vcf_out} --force_overwrite '\
'--vcf --fork {threads} --everything --offline --coding_only --no_intergenic '\
'--plugin LoF,human_ancestor_fa:~/.vep/homo_sapiens/77_GRCh37/human_ancestor.fa.gz '\
'-custom ~/reference/ExAC0.3/ExAC.r0.3.sites.vep.vcf.gz,ExAC,vcf,exact,0,AF,AC,AC_AFR,'\
'AC_AMR,AC_Adj,AC_EAS,AC_FIN,AC_Het,AC_Hom,AC_NFE,AC_OTH,AC_SAS,AF,AN,AN_AFR,AN_AMR,AN_Adj,'\
'AN_EAS,AN_FIN,AN_NFE,AN_OTH,AN_SAS'.format(vcf_in=vcf_in, vcf_out=vcf_out, threads=cores)
self.run_picard('annotate_vep', command)
def annotate_snpeff(self, vcf_in, vcf_out):
'''Annotate VCF file using SnpEff'''
cores = self.get_stage_options('annotate_snpeff', 'cores')
command = 'java -jar {snpeffjar} eff -c {snpeffconf} -canon hg19 {vcf_in} > '\
'{vcf_out}'.format(snpeffjar=self.snpeffjar,snpeffconf=self.snpeffconf,vcf_in=vcf_in, vcf_out=vcf_out)
self.run_picard('annotate_snpeff', command)
def mark_duplicates_picard(self, bam_in, outputs):
'''Mark duplicate reads using Picard'''
dedup_bam_out, metrics_out = outputs
picard_args = 'MarkDuplicates INPUT={bam_in} OUTPUT={dedup_bam_out} ' \
'METRICS_FILE={metrics_out} VALIDATION_STRINGENCY=LENIENT ' \
'MAX_RECORDS_IN_RAM=5000000 ASSUME_SORTED=True ' \
'CREATE_INDEX=True'.format(bam_in=bam_in, dedup_bam_out=dedup_bam_out,
metrics_out=metrics_out)
self.run_picard('mark_duplicates_picard', picard_args)
def chrom_intervals_gatk(self, inputs, intervals_out):
'''Generate chromosome intervals using GATK'''
bam_in, _metrics_dup = inputs
cores = self.get_stage_options('chrom_intervals_gatk', 'cores')
gatk_args = '-T RealignerTargetCreator -R {reference} -I {bam} ' \
'--num_threads {threads} --known {mills_grch37} ' \
'--known {one_k_g_indels} -L {interval_grch37} ' \
'-o {out}'.format(reference=self.reference, bam=bam_in,
threads=cores, mills_grch37=self.mills_grch37,
one_k_g_indels=self.one_k_g_indels,
interval_grch37=self.interval_grch37,
out=intervals_out)
self.run_gatk('chrom_intervals_gatk', gatk_args)
def local_realignment_gatk(self, inputs, bam_out):
'''Local realign reads using GATK'''
target_intervals_in, bam_in = inputs
gatk_args = "-T IndelRealigner -R {reference} -I {bam} -L {interval_grch37} " \
"-targetIntervals {target_intervals} -known {mills_grch37} " \
"-known {one_k_g_indels} " \
"-o {out}".format(reference=self.reference, bam=bam_in,
mills_grch37=self.mills_grch37,
one_k_g_indels=self.one_k_g_indels,
interval_grch37=self.interval_grch37,
target_intervals=target_intervals_in,
out=bam_out)
self.run_gatk('local_realignment_gatk', gatk_args)
# XXX I'm not sure that --num_cpu_threads_per_data_thread has any benefit here
def base_recalibration_gatk(self, bam_in, outputs):
'''Base recalibration using GATK'''
csv_out, log_out = outputs
gatk_args = "-T BaseRecalibrator -R {reference} -I {bam} " \
"--num_cpu_threads_per_data_thread 4 --knownSites {dbsnp_grch37} " \
"--knownSites {mills_grch37} --knownSites {one_k_g_indels} " \
"-log {log} -o {out}".format(reference=self.reference, bam=bam_in,
mills_grch37=self.mills_grch37, dbsnp_grch37=self.dbsnp_grch37,
one_k_g_indels=self.one_k_g_indels,
log=log_out, out=csv_out)
self.run_gatk('base_recalibration_gatk', gatk_args)
# XXX I'm not sure that --num_cpu_threads_per_data_thread has any benefit here
def print_reads_gatk(self, inputs, bam_out):
'''Print reads using GATK'''
[csv_in, _log], bam_in = inputs
gatk_args = "-T PrintReads -R {reference} -I {bam} --BQSR {recal_csv} " \
"-o {out} --num_cpu_threads_per_data_thread 4".format(reference=self.reference,
bam=bam_in, recal_csv=csv_in, out=bam_out)
self.run_gatk('print_reads_gatk', gatk_args)
def call_variants_gatk(self, bam_in, vcf_out):
'''Call variants using GATK'''
gatk_args = "-T HaplotypeCaller -R {reference} --min_base_quality_score 20 " \
"--variant_index_parameter 128000 --emitRefConfidence GVCF " \
"--standard_min_confidence_threshold_for_calling 30.0 " \
"--num_cpu_threads_per_data_thread 8 " \
"--variant_index_type LINEAR " \
"--standard_min_confidence_threshold_for_emitting 30.0 " \
"-I {bam} -L {interval_list} -o {out}".format(reference=self.reference,
bam=bam_in, interval_list=self.interval_grch37, out=vcf_out)
self.run_gatk('call_variants_gatk', gatk_args)
def combine_gvcf_gatk(self, vcf_files_in, vcf_out):
'''Combine G.VCF files for all samples using GATK'''
g_vcf_files = ' '.join(['--variant ' + vcf for vcf in vcf_files_in])
gatk_args = "-T CombineGVCFs -R {reference} " \
"--disable_auto_index_creation_and_locking_when_reading_rods " \
"{g_vcf_files} -o {vcf_out}".format(reference=self.reference,
g_vcf_files=g_vcf_files, vcf_out=vcf_out)
self.run_gatk('combine_gvcf_gatk', gatk_args)
def genotype_gvcf_gatk(self, merged_vcf_in, vcf_out):
'''Genotype G.VCF files using GATK'''
cores = self.get_stage_options('genotype_gvcf_gatk', 'cores')
gatk_args = "-T GenotypeGVCFs -R {reference} " \
"--disable_auto_index_creation_and_locking_when_reading_rods " \
"--num_threads {cores} --variant {merged_vcf} --out {vcf_out} " \
"--variant {CEU_mergeGvcf} --variant {GBR_mergeGvcf} " \
"--variant {FIN_mergeGvcf}".format(reference=self.reference,
cores=cores, merged_vcf=merged_vcf_in, vcf_out=vcf_out,
CEU_mergeGvcf=self.CEU_mergeGvcf, GBR_mergeGvcf=self.GBR_mergeGvcf,
FIN_mergeGvcf=self.FIN_mergeGvcf)
self.run_gatk('genotype_gvcf_gatk', gatk_args)
def snp_recalibrate_gatk(self, genotype_vcf_in, outputs):
'''SNP recalibration using GATK'''
recal_snp_out, tranches_snp_out, snp_plots_r_out = outputs
cores = self.get_stage_options('snp_recalibrate_gatk', 'cores')
gatk_args = "-T VariantRecalibrator --disable_auto_index_creation_and_locking_when_reading_rods " \
"-R {reference} --minNumBadVariants 5000 --num_threads {cores} " \
"-resource:hapmap,known=false,training=true,truth=true,prior=15.0 {hapmap} " \
"-resource:omni,known=false,training=true,truth=true,prior=12.0 {one_k_g_snps} " \
"-resource:1000G,known=false,training=true,truth=false,prior=10.0 {one_k_g_highconf_snps} " \
"-an QD -an MQRankSum -an ReadPosRankSum -an FS -an InbreedingCoeff " \
"-input {genotype_vcf} --recal_file {recal_snp} --tranches_file {tranches_snp} " \
"-rscriptFile {snp_plots} -mode SNP".format(reference=self.reference,
cores=cores, hapmap=self.hapmap, one_k_g_snps=self.one_k_g_snps,
one_k_g_highconf_snps=self.one_k_g_highconf_snps, genotype_vcf=genotype_vcf_in,
recal_snp=recal_snp_out, tranches_snp=tranches_snp_out, snp_plots=snp_plots_r_out)
self.run_gatk('snp_recalibrate_gatk', gatk_args)
def indel_recalibrate_gatk(self, genotype_vcf_in, outputs):
'''INDEL recalibration using GATK'''
recal_indel_out, tranches_indel_out, indel_plots_r_out = outputs
cores = self.get_stage_options('indel_recalibrate_gatk', 'cores')
gatk_args = "-T VariantRecalibrator --disable_auto_index_creation_and_locking_when_reading_rods " \
"-R {reference} --minNumBadVariants 5000 --num_threads {cores} " \
"-resource:mills,known=false,training=true,truth=true,prior=12.0 {mills_grch37} " \
"-resource:1000G,known=false,training=true,truth=true,prior=10.0 {one_k_g_indels} " \
"-an MQRankSum -an ReadPosRankSum -an FS -input {genotype_vcf} -recalFile {recal_indel} " \
"-tranchesFile {tranches_indel} -rscriptFile {indel_plots} " \
" -mode INDEL".format(reference=self.reference,
cores=cores, mills_grch37=self.mills_grch37, one_k_g_indels=self.one_k_g_indels,
genotype_vcf=genotype_vcf_in, recal_indel=recal_indel_out,
tranches_indel=tranches_indel_out, indel_plots=indel_plots_r_out)
self.run_gatk('indel_recalibrate_gatk', gatk_args)
def apply_snp_recalibrate_gatk(self, inputs, vcf_out):
'''Apply SNP recalibration using GATK'''
genotype_vcf_in, [recal_snp, tranches_snp] = inputs
cores = self.get_stage_options('apply_snp_recalibrate_gatk', 'cores')
gatk_args = "-T ApplyRecalibration --disable_auto_index_creation_and_locking_when_reading_rods " \
"-R {reference} --ts_filter_level 99.5 --excludeFiltered --num_threads {cores} " \
"-input {genotype_vcf} -recalFile {recal_snp} -tranchesFile {tranches_snp} " \
"-mode SNP -o {vcf_out}".format(reference=self.reference,
cores=cores, genotype_vcf=genotype_vcf_in, recal_snp=recal_snp,
tranches_snp=tranches_snp, vcf_out=vcf_out)
self.run_gatk('apply_snp_recalibrate_gatk', gatk_args)
def apply_indel_recalibrate_gatk(self, inputs, vcf_out):
'''Apply INDEL recalibration using GATK'''
genotype_vcf_in, [recal_indel, tranches_indel] = inputs
cores = self.get_stage_options('apply_indel_recalibrate_gatk', 'cores')
gatk_args = "-T ApplyRecalibration --disable_auto_index_creation_and_locking_when_reading_rods " \
"-R {reference} --ts_filter_level 99.0 --excludeFiltered --num_threads {cores} " \
"-input {genotype_vcf} -recalFile {recal_indel} -tranchesFile {tranches_indel} " \
"-mode INDEL -o {vcf_out}".format(reference=self.reference,
cores=cores, genotype_vcf=genotype_vcf_in, recal_indel=recal_indel,
tranches_indel=tranches_indel, vcf_out=vcf_out)
self.run_gatk('apply_indel_recalibrate_gatk', gatk_args)
def combine_variants_gatk(self, inputs, vcf_out):
'''Combine variants using GATK'''
recal_snp, [recal_indel] = inputs
cores = self.get_stage_options('combine_variants_gatk', 'cores')
gatk_args = "-T CombineVariants -R {reference} --disable_auto_index_creation_and_locking_when_reading_rods " \
"--num_threads {cores} --genotypemergeoption UNSORTED --variant {recal_snp} " \
"--variant {recal_indel} -o {vcf_out}".format(reference=self.reference,
cores=cores, recal_snp=recal_snp, recal_indel=recal_indel,
vcf_out=vcf_out)
self.run_gatk('combine_variants_gatk', gatk_args)
def select_variants_gatk(self, combined_vcf, vcf_out):
'''Select variants using GATK'''
gatk_args = "-T SelectVariants -R {reference} --disable_auto_index_creation_and_locking_when_reading_rods " \
"--variant {combined_vcf} -select 'DP > 100' -o {vcf_out}".format(reference=self.reference,
combined_vcf=combined_vcf, vcf_out=vcf_out)
self.run_gatk('select_variants_gatk', gatk_args)
| 55.710714 | 118 | 0.640233 | 2,014 | 0.129111 | 0 | 0 | 0 | 0 | 0 | 0 | 6,359 | 0.407654 |
a603ad8008600d2c8038cd2fcf8557be817d480f | 1,959 | py | Python | chrome/test/pyautolib/generate_docs.py | nagineni/chromium-crosswalk | 5725642f1c67d0f97e8613ec1c3e8107ab53fdf8 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 231 | 2015-01-08T09:04:44.000Z | 2021-12-30T03:03:10.000Z | chrome/test/pyautolib/generate_docs.py | 1065672644894730302/Chromium | 239dd49e906be4909e293d8991e998c9816eaa35 | [
"BSD-3-Clause"
] | 5 | 2015-03-27T14:29:23.000Z | 2019-09-25T13:23:12.000Z | chrome/test/pyautolib/generate_docs.py | 1065672644894730302/Chromium | 239dd49e906be4909e293d8991e998c9816eaa35 | [
"BSD-3-Clause"
] | 268 | 2015-01-21T05:53:28.000Z | 2022-03-25T22:09:01.000Z | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import os
import pydoc
import shutil
import sys
def main():
    # Generate pydoc HTML for the pyautolib package, then collect the
    # emitted .html files into the requested output directory.
    parser = optparse.OptionParser()
    parser.add_option('-w', '--write', dest='dir', metavar='FILE',
                      default=os.path.join(os.getcwd(), 'pyauto_docs'),
                      help=('Directory path to write all of the documentation. '
                            'Defaults to "pyauto_docs" in current directory.'))
    parser.add_option('-p', '--pyautolib', dest='pyautolib', metavar='FILE',
                      default=os.getcwd(),
                      help='Location of pyautolib directory')
    (options, args) = parser.parse_args()
    if not os.path.isdir(options.dir):
        os.makedirs(options.dir)
    # Add these paths so pydoc can find everything
    sys.path.append(os.path.join(options.pyautolib,
                                 '../../../third_party/'))
    sys.path.append(options.pyautolib)
    # Get a snapshot of the current directory where pydoc will export the files
    previous_contents = set(os.listdir(os.getcwd()))
    pydoc.writedocs(options.pyautolib)
    current_contents = set(os.listdir(os.getcwd()))
    if options.dir == os.getcwd():
        # Output directory is the cwd: pydoc already wrote the files in
        # place, so there is nothing to move.
        print 'Export complete, files are located in %s' % options.dir
        # NOTE(review): returning 1 signals failure to the shell even though
        # this path completed successfully — confirm whether 0 was intended.
        return 1
    # Only the files pydoc created this run are candidates for the move.
    new_files = current_contents.difference(previous_contents)
    for file_name in new_files:
        basename, extension = os.path.splitext(file_name)
        if extension == '.html':
            # Build the complete path
            full_path = os.path.join(os.getcwd(), file_name)
            existing_file_path = os.path.join(options.dir, file_name)
            # Remove any previously exported copy; shutil.move cannot
            # overwrite an existing destination file.
            if os.path.isfile(existing_file_path):
                os.remove(existing_file_path)
            shutil.move(full_path, options.dir)
    print 'Export complete, files are located in %s' % options.dir
    return 0
if __name__ == '__main__':
sys.exit(main())
| 33.775862 | 78 | 0.671261 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 659 | 0.336396 |
a6049452e4aac0634d91765ee03449bd362ab74a | 1,046 | py | Python | swagger_ui/__init__.py | dirkgomez/voice-skill-sdk | ff8a3cc226f48d65fe9ad06741a03b8205f1f0b5 | [
"MIT"
] | null | null | null | swagger_ui/__init__.py | dirkgomez/voice-skill-sdk | ff8a3cc226f48d65fe9ad06741a03b8205f1f0b5 | [
"MIT"
] | null | null | null | swagger_ui/__init__.py | dirkgomez/voice-skill-sdk | ff8a3cc226f48d65fe9ad06741a03b8205f1f0b5 | [
"MIT"
] | null | null | null | #
# voice-skill-sdk
#
# (C) 2020, Deutsche Telekom AG
#
# Deutsche Telekom AG and all other contributors /
# copyright owners license this file to you under the MIT
# License (the "License"); you may not use this file
# except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from pathlib import Path
from bottle import get, static_file, redirect
here: Path = Path(__file__).absolute().parent
UI_ROOT = here / 'node_modules/swagger-ui-dist'
@get('/')
def root():
    """Redirect the bare application root to the Swagger UI index."""
    target = '/swagger-ui/'
    return redirect(target)
@get('/swagger-ui/')
@get('/swagger-ui/<filename:path>')
def send_static(filename=None):
    """Serve a static Swagger UI asset, defaulting to the index page."""
    target = filename if filename else 'index.html'
    return static_file(target, root=UI_ROOT)
| 26.820513 | 62 | 0.739006 | 0 | 0 | 0 | 0 | 208 | 0.198853 | 0 | 0 | 746 | 0.713193 |
a6053be31232e1f11d342234014c05d2dc6f686a | 1,112 | py | Python | courses/migrations/0009_enrollment_statuses.py | mitodl/mit-xpro | 981d6c87d963837f0b9ccdd996067fe81394dba4 | [
"BSD-3-Clause"
] | 10 | 2019-02-20T18:41:32.000Z | 2021-07-26T10:39:58.000Z | courses/migrations/0009_enrollment_statuses.py | mitodl/mit-xpro | 981d6c87d963837f0b9ccdd996067fe81394dba4 | [
"BSD-3-Clause"
] | 2,226 | 2019-02-20T20:03:57.000Z | 2022-03-31T11:18:56.000Z | courses/migrations/0009_enrollment_statuses.py | mitodl/mit-xpro | 981d6c87d963837f0b9ccdd996067fe81394dba4 | [
"BSD-3-Clause"
] | 4 | 2020-08-26T19:26:02.000Z | 2021-03-09T17:46:47.000Z | # Generated by Django 2.1.7 on 2019-05-24 19:17
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add activity and edX-sync status flags to the enrollment models."""

    dependencies = [("courses", "0008_enrollment_company")]

    # One AddField per row: (model_name, field_name, default, help_text).
    operations = [
        migrations.AddField(
            model_name=model_name,
            name=field_name,
            field=models.BooleanField(default=default, help_text=help_text),
        )
        for model_name, field_name, default, help_text in (
            (
                "courserunenrollment",
                "active",
                True,
                "Indicates whether or not this enrollment should be considered active",
            ),
            (
                "courserunenrollment",
                "edx_enrolled",
                False,
                "Indicates whether or not the request succeeded to enroll via the edX API",
            ),
            (
                "programenrollment",
                "active",
                True,
                "Indicates whether or not this enrollment should be considered active",
            ),
        )
    ]
a607916008e86d338b3f708f7b4629cb6af9e328 | 2,665 | py | Python | Course 1 - Introduction to Python/Unit4_Good_Programming_Practices/ProblemSet_4/Problem1_WordScores.py | Odzen/MIT_React_DataScience | 3b82ef75dab1880cdbf0136a7b716e90cb27a9cb | [
"MIT"
] | null | null | null | Course 1 - Introduction to Python/Unit4_Good_Programming_Practices/ProblemSet_4/Problem1_WordScores.py | Odzen/MIT_React_DataScience | 3b82ef75dab1880cdbf0136a7b716e90cb27a9cb | [
"MIT"
] | null | null | null | Course 1 - Introduction to Python/Unit4_Good_Programming_Practices/ProblemSet_4/Problem1_WordScores.py | Odzen/MIT_React_DataScience | 3b82ef75dab1880cdbf0136a7b716e90cb27a9cb | [
"MIT"
] | null | null | null | SCRABBLE_LETTER_VALUES = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1, 'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8, 'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1, 'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1, 'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4, 'z': 10
}
# The first step is to implement some code that allows us to calculate the score
# for a single word. The function getWordScore should accept as input a string
# of lowercase letters (a word) and return the integer score for that word,
# using the game's scoring rules.
# A Reminder of the Scoring Rules
# Hints You may assume that the input word is always either a string of
# lowercase letters, or the empty string "". You will want to use the
# SCRABBLE_LETTER_VALUES dictionary defined at the top of ps4a.py. You should
# not change its value. Do not assume that there are always 7 letters in a hand!
# The parameter n is the number of letters required for a bonus score (the
# maximum number of letters in the hand). Our goal is to keep the code modular -
# if you want to try playing your word game with n=10 or n=4, you will be able
# to do it by simply changing the value of HAND_SIZE! Testing: If this function
# is implemented properly, and you run test_ps4a.py, you should see that the
# test_getWordScore() tests pass. Also test your implementation of getWordScore,
# using some reasonable English words. Fill in the code for getWordScore in
# ps4a.py and be sure you've passed the appropriate tests in test_ps4a.py before
# pasting your function definition here.
def getWordScore(word, n):
    """
    Returns the score for a word. Assumes the word is a valid word.

    The score for a word is the sum of the points for letters in the
    word, multiplied by the length of the word, PLUS 50 points if all n
    letters are used on the first turn.

    Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is
    worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES)

    word: string (lowercase letters), may be the empty string ""
    n: integer (HAND_SIZE; i.e., hand size required for additional points)
    returns: int >= 0
    """
    # The file as extracted had dataset statistics fused onto the final
    # return line, which made the function raise TypeError; cleaned here.
    if not word:
        return 0
    # Sum the per-letter values, then scale by the word length.
    score = sum(SCRABBLE_LETTER_VALUES[letter] for letter in word)
    score *= len(word)
    # Bonus for using the entire hand.
    if len(word) == n:
        score += 50
    return score
a607b192011ac8e285ffec176b21be2df18e07fe | 1,647 | py | Python | python/86.partition-list.py | Zhenye-Na/leetcode | 95196a45f5709ccf7b970ee5ac84a4bf8fe2301e | [
"MIT"
] | 10 | 2019-09-15T00:23:57.000Z | 2022-01-05T12:53:42.000Z | python/86.partition-list.py | Zhenye-Na/leetcode | 95196a45f5709ccf7b970ee5ac84a4bf8fe2301e | [
"MIT"
] | 3 | 2021-06-30T00:39:26.000Z | 2021-08-01T07:13:59.000Z | python/86.partition-list.py | Zhenye-Na/leetcode | 95196a45f5709ccf7b970ee5ac84a4bf8fe2301e | [
"MIT"
] | 6 | 2020-02-08T02:55:22.000Z | 2022-01-02T22:48:18.000Z | #
# @lc app=leetcode id=86 lang=python3
#
# [86] Partition List
#
# https://leetcode.com/problems/partition-list/description/
#
# algorithms
# Medium (43.12%)
# Likes: 1981
# Dislikes: 384
# Total Accepted: 256.4K
# Total Submissions: 585.7K
# Testcase Example: '[1,4,3,2,5,2]\n3'
#
# Given the head of a linked list and a value x, partition it such that all
# nodes less than x come before nodes greater than or equal to x.
#
# You should preserve the original relative order of the nodes in each of the
# two partitions.
#
#
# Example 1:
#
#
# Input: head = [1,4,3,2,5,2], x = 3
# Output: [1,2,2,4,3,5]
#
#
# Example 2:
#
#
# Input: head = [2,1], x = 2
# Output: [1,2]
#
#
#
# Constraints:
#
#
# The number of nodes in the list is in the range [0, 200].
# -100 <= Node.val <= 100
# -200 <= x <= 200
#
#
#
# @lc code=start
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
    def partition(self, head: "ListNode", x: int) -> "ListNode":
        """Stably partition a linked list around the pivot value x.

        All nodes with val < x come before all nodes with val >= x, each
        group keeping its original relative order (LeetCode 86).
        O(n) time, O(1) extra space.

        The type annotations are string literals because the ListNode
        class above is commented out (LeetCode injects it at runtime);
        bare names would raise NameError when this module is imported.
        """
        if not head:
            return head
        # Two dummy-headed chains collect the small and large nodes.
        smaller_dummy = ListNode(0)
        greater_dummy = ListNode(0)
        smaller = smaller_dummy
        greater = greater_dummy
        node = head
        while node:
            if node.val < x:
                smaller.next = node
                smaller = smaller.next
            else:
                greater.next = node
                greater = greater.next
            node = node.next
        # Terminate the greater chain so it cannot cycle back into the list.
        greater.next = None
        smaller.next = greater_dummy.next
        return smaller_dummy.next
# @lc code=end
| 20.5875 | 77 | 0.579842 | 631 | 0.383121 | 0 | 0 | 0 | 0 | 0 | 0 | 960 | 0.582878 |
a60999b70bdab73a337c0a801d933159535f4296 | 382 | py | Python | schemas/users.py | Ashishb21/propertyConnect | 60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8 | [
"MIT"
] | null | null | null | schemas/users.py | Ashishb21/propertyConnect | 60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8 | [
"MIT"
] | null | null | null | schemas/users.py | Ashishb21/propertyConnect | 60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8 | [
"MIT"
] | null | null | null | from typing import Optional
from pydantic import BaseModel,EmailStr
#properties required during user creation
class RegisterUser(BaseModel):
    """Request payload required to create a new user account."""
    username: str
    email : EmailStr
    password : str
    phone_no: int
class ShowUser(BaseModel):
    """Response schema exposing safe user fields (never the password)."""
    username: str
    email: EmailStr
    is_active: bool

    # The extracted file had dataset statistics fused onto the orm_mode
    # line, which broke the class body; cleaned here.
    class Config:  # to convert non dict obj to json
        orm_mode = True
a609fb2abfa8b580d67f255628dff6280de6c8ed | 773 | py | Python | server/migrations/versions/b5ae9c8c6118_.py | uptownnickbrown/metaseq | fc6853640921ca4853b3d4ed3d3354855983db11 | [
"MIT"
] | 7 | 2017-03-27T09:57:55.000Z | 2018-06-09T17:44:31.000Z | server/migrations/versions/b5ae9c8c6118_.py | uptownnickbrown/metaseq | fc6853640921ca4853b3d4ed3d3354855983db11 | [
"MIT"
] | 4 | 2019-06-05T15:07:49.000Z | 2021-12-13T19:46:40.000Z | server/migrations/versions/b5ae9c8c6118_.py | uptownnickbrown/metaseq | fc6853640921ca4853b3d4ed3d3354855983db11 | [
"MIT"
] | 1 | 2019-01-28T07:02:25.000Z | 2019-01-28T07:02:25.000Z | """empty message
Revision ID: b5ae9c8c6118
Revises: aa1ce7a80455
Create Date: 2017-06-28 11:39:37.100530
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b5ae9c8c6118'
down_revision = 'aa1ce7a80455'
branch_labels = None
depends_on = None
def upgrade():
    """Apply: add unique constraints on publication.pubmed_uid and run.run_id.

    The constraint name is None, so Alembic is presumably expected to derive
    it from the configured naming convention — confirm one is set.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_unique_constraint(None, 'publication', ['pubmed_uid'])
    op.create_unique_constraint(None, 'run', ['run_id'])
    # ### end Alembic commands ###
def downgrade():
    """Revert: drop the unique constraints added by upgrade()."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'run', type_='unique')
    op.drop_constraint(None, 'publication', type_='unique')
    # ### end Alembic commands ###
| 24.935484 | 68 | 0.698577 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 432 | 0.558862 |
a60ac9ee60b4cd5f2b4bfcc6ac5ec20715f1d582 | 564 | py | Python | seventweets/handlers/base.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | [
"Apache-2.0"
] | 2 | 2017-06-12T11:00:38.000Z | 2018-06-21T07:54:38.000Z | seventweets/handlers/base.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | [
"Apache-2.0"
] | null | null | null | seventweets/handlers/base.py | sbg/seventweets | a2733ff175e8be6e34f346a9a14f7f30ab24ea74 | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint, current_app, jsonify
from seventweets.exceptions import error_handler
from seventweets import tweet
base = Blueprint('base', __name__)
@base.route('/')
@error_handler
def index():
    """Root endpoint: report this node's identity and tweet statistics."""
    own_count = tweet.count('original')
    retweet_count = tweet.count('retweet')
    payload = {
        'name': current_app.config['ST_OWN_NAME'],
        'address': current_app.config['ST_OWN_ADDRESS'],
        'stats': {
            'original': own_count,
            'retweets': retweet_count,
            'total': own_count + retweet_count,
        }
    }
    return jsonify(payload)
| 25.636364 | 56 | 0.636525 | 0 | 0 | 0 | 0 | 396 | 0.702128 | 0 | 0 | 106 | 0.187943 |
a60eb03e65bf7d4946683c6178897b33cccd7025 | 31,947 | py | Python | rasahub_humhub/humhub.py | tomofu74/rasahub-humhub | 7e421574230cb7fb9f5de4973dae75a53323dffb | [
"MIT"
] | null | null | null | rasahub_humhub/humhub.py | tomofu74/rasahub-humhub | 7e421574230cb7fb9f5de4973dae75a53323dffb | [
"MIT"
] | null | null | null | rasahub_humhub/humhub.py | tomofu74/rasahub-humhub | 7e421574230cb7fb9f5de4973dae75a53323dffb | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from datetime import datetime
# import rasahub_google_calendar
from time import gmtime, time, strftime
import json
import locale
import logging
import math
import mysql.connector
from mysql.connector import errorcode
import os
import string
import random
import re
import yaml
from nltk.stem.snowball import SnowballStemmer
import httplib2
stemmer = SnowballStemmer("german")
logger = logging.getLogger(__name__)
offlinemode = False
locale.setlocale(locale.LC_ALL, "de_DE.utf8")
class NotAuthenticatedError(Exception):
    """Raised whenever a Google user has not authenticated properly.

    The message text is fixed; callers typically only care about the type.
    """
    def __init__(self):
        """Initialize the exception with its fixed error message."""
        self.msg = "Not Authenticated"
    def __str__(self):
        """Return the stored error message."""
        return self.msg
def connectToDB(dbHost, dbName, dbPort, dbUser, dbPwd):
    """
    Establishes connection to the database

    :param dbHost: database host address
    :type dbHost: str.
    :param dbName: database name
    :type dbName: str.
    :param dbPort: database host port
    :type dbPort: int.
    :param dbUser: database username
    :type dbUser: str.
    :param dbPwd: database userpassword
    :type dbPwd: str.
    :return: Instance of class MySQLConnection, or None implicitly when the
             connection fails (the error is only printed, not re-raised) —
             callers must be prepared for a None result.
    :rtype: MySQLConnection
    """
    try:
        cnx = mysql.connector.connect(user=dbUser, port=int(dbPort), password=dbPwd, host=dbHost, database=dbName, autocommit=True)
    except mysql.connector.Error as err:
        # Distinguish the two most common failure modes; anything else is
        # printed verbatim. No exception is propagated to the caller.
        if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            print("Something is wrong with your user name or password")
        elif err.errno == errorcode.ER_BAD_DB_ERROR:
            print("Database does not exist")
        else:
            print(err)
    else:
        return cnx
def getBotID(cursor):
    """Return the Humhub user id of a bot account.

    Selects the highest user id from the Humhub user group named 'Bots'.

    :param cursor: open MySQL cursor
    :return: Bots Humhub User ID
    :rtype: int
    """
    sql = "SELECT `user_id` FROM `group` JOIN `group_user` ON `group`.`id` = `group_user`.`group_id` WHERE `group`.`name` = 'Bots' ORDER BY user_id DESC LIMIT 1;"
    cursor.execute(sql)
    row = cursor.fetchone()
    return row[0]
def getNextID(cursor, current_id, bot_id, trigger):
    """Return the id of the next message entry the bot should process.

    Considers entries newer than ``current_id`` that were not written by the
    bot itself and that either start with the trigger word or belong to a
    conversation the bot participates in.

    :param cursor: open MySQL cursor
    :param int current_id: id of the last message already processed
    :param int bot_id: Humhub user id of the bot (its own entries are skipped)
    :param str trigger: trigger prefix, e.g. '!bot'
    :return: id of the next message to process, or ``current_id`` if none
    :rtype: int
    """
    # NOTE(review): the subquery hard-codes user id 5 instead of using
    # %(bot_id)s — looks deployment-specific; confirm before changing.
    query = ("SELECT id FROM message_entry WHERE user_id <> %(bot_id)s AND (content LIKE %(trigger)s OR message_entry.message_id IN "
             "(SELECT DISTINCT message_entry.message_id FROM message_entry JOIN user_message "
             "ON message_entry.message_id=user_message.message_id WHERE user_message.user_id = 5 ORDER BY message_entry.message_id)) "
             "AND id > %(current_id)s ORDER BY id ASC")
    data = {
        'bot_id': bot_id,
        'trigger': trigger + '%',  # wildcard for SQL LIKE
        'current_id': current_id,
    }
    # cursor.execute returns None for mysql-connector, so (unlike the
    # original) no misleading "row_count" variable is kept.
    cursor.execute(query, data)
    results = cursor.fetchall()  # list of one-element tuples, ascending by id
    if results:
        return results[0][0]
    return current_id
def getMessage(cursor, msg_id, trigger):
    """Fetch one message entry and strip a leading trigger word.

    :param cursor: open MySQL cursor
    :param int msg_id: id of the message_entry row to load
    :param str trigger: trigger prefix removed from the content if present
    :return: dict with 'message' (stripped text) and 'message_id'
             (conversation id)
    :rtype: dict
    """
    cursor.execute(
        "SELECT message_id, content FROM message_entry WHERE (user_id <> 5 AND id = {})".format(msg_id)
    )
    conversation_id, content = cursor.fetchone()
    if content.startswith(trigger):
        text = content[len(trigger):]
    else:
        text = content
    return {
        'message': text.strip(),
        'message_id': conversation_id
    }
def create_new_conversation(cursor, title, message, user_id, bot_id):
    """Create a new Humhub conversation containing one initial message.

    Inserts the conversation row (``message`` table), registers the target
    user as participant (``user_message``) and stores the first message
    entry (``message_entry``).

    Fixes over the original: all three statements used str.format to embed
    raw values into SQL (unquoted strings -> broken SQL, and injection
    prone), and the last INSERT listed five columns but only four
    placeholders. Everything is parameterized now with matching counts.

    :param cursor: open MySQL cursor
    :param str title: title of the new conversation
    :param str message: first message text
    :param int user_id: Humhub user id of the conversation partner
    :param int bot_id: Humhub user id of the bot (creator)
    """
    query = "INSERT INTO message (title, created_by, updated_by) VALUES (%s, %s, %s)"
    cursor.execute(query, (title, bot_id, bot_id))
    message_id = cursor.lastrowid
    query = "INSERT INTO user_message (message_id, user_id, created_by, updated_by) VALUES (%s, %s, %s, %s)"
    cursor.execute(query, (message_id, user_id, bot_id, bot_id))
    query = ("INSERT INTO message_entry (message_id, user_id, content, created_by, updated_by) "
             "VALUES (%s, %s, %s, %s, %s)")
    cursor.execute(query, (message_id, user_id, message, bot_id, bot_id))
def check_google_access(message_id, cursor, bot_id):
    """Verify that every conversation participant has Google calendar access.

    For each non-bot participant a calendar fetch is attempted; on failure
    an authentication link is sent to that user and the check aborts.

    Fixes over the original: the except branch called the undefined name
    ``send_auth`` (the helper defined in this module is ``send_auth_link``),
    which would raise NameError exactly when authentication failed; two
    unused locals were dropped and the bare except narrowed.

    :param int message_id: id of the conversation to check
    :param cursor: open MySQL cursor
    :param int bot_id: Humhub user id of the bot (excluded from the check)
    :return: True when all users are authenticated, [] otherwise
             (both the original and this version use the falsy empty list
             as the failure value)
    """
    users = getUsersInConversation(cursor, message_id, bot_id)
    for user_id in users:
        try:
            # Raises when the user has not authorized calendar access.
            get_google_calendar_items(user_id)
        except Exception:
            send_auth_link(cursor, user_id, bot_id)
            return []
    return True
def getCurrentID(cursor):
    """Return the highest message entry id currently stored in Humhub.

    :param cursor: open MySQL cursor
    :return: current maximum message_entry id
    :rtype: int
    """
    sql = "SELECT MAX(id) FROM message_entry;"
    cursor.execute(sql)
    row = cursor.fetchone()
    return row[0]
def send_auth_link(cursor, user_id, bot_id):
    """Open a new conversation containing the Google OAuth URL for a user.

    :param cursor: open MySQL cursor
    :param int user_id: Humhub user id that needs to authenticate
    :param int bot_id: Humhub user id of the bot sending the link
    """
    auth_url = "http://localhost:8080/" + str(user_id)
    create_new_conversation(
        cursor,
        "Bitte authentifizieren Sie sich",
        auth_url,
        user_id,
        bot_id,
    )
def getUsersInConversation(cursor, sender_id, bot_id):
    """List the Humhub user ids participating in a conversation.

    The bot's own user id is excluded from the result. The original code
    compared each raw result *tuple* against the integer ``bot_id`` (always
    unequal), so the bot was never actually filtered out; the first element
    of each row is compared now.

    :param cursor: Mysql Cursor
    :type cursor: mysql.connector.cursor.MySQLCursor
    :param sender_id: Humhub conversation (message) id
    :type sender_id: int
    :param bot_id: Bot Humhub User ID
    :type bot_id: int
    :return: List of users in conversation, without the bot
    :rtype: list
    """
    query = ("""SELECT user_id FROM user_message WHERE message_id = {}
    """).format(sender_id)
    cursor.execute(query)
    return [row[0] for row in cursor if row[0] != bot_id]
def getCalendar(user_id, date, cursor):
    """Return the busy-slot pattern of one Humhub user for one day.

    Fetches the user's Google calendar items and marks them in a fresh
    blank 24x4 quarter-hour pattern. When the Google fetch fails, an
    authentication link is sent to the user and NotAuthenticatedError is
    raised.

    :param user_id: Humhub user ID to get the calendar information from
    :type user_id: int
    :param date: specific date to get the calendar information for
    :type date: datetime
    :param cursor: Mysql Cursor (only used to look up the bot id / send
                   the auth link)
    :type cursor: mysql.connector.cursor.MySQLCursor
    :return: calendar pattern with busy slots of user_id set to 1
    :rtype: list
    :raises NotAuthenticatedError: when the user lacks Google authorization
    """
    # create calendar pattern
    calendarPattern = createCalendarPattern()
    # NOTE(review): startdate/enddate are only consumed by the commented-out
    # Humhub query below — dead code left over from the pre-Google version.
    startdate = date.strftime("%Y-%m-%d 00:00:00")
    enddate = date.strftime("%Y-%m-%d 23:59:59")
    startdate = "'" + startdate + "'"
    enddate = "'" + enddate + "'"
    #query = ("""SELECT start_datetime, end_datetime FROM calendar_entry
    #            INNER JOIN calendar_entry_participant ON
    #            calendar_entry.id =
    #            calendar_entry_participant.calendar_entry_id
    #            WHERE calendar_entry_participant.user_id = {} AND
    #            calendar_entry_participant.participation_state = 3 AND
    #            calendar_entry.start_datetime BETWEEN {} AND {}
    #            """).format(user_id, startdate, enddate)
    #cursor.execute(query)
    try:
        # NOTE(review): get_google_calendar_items is not defined in this
        # module (the rasahub_google_calendar import above is commented
        # out) — presumably injected elsewhere; verify at runtime.
        dates = get_google_calendar_items(user_id)
    except:
        # not authenticated: send the OAuth link and signal the caller
        bot_id = getBotID(cursor)
        send_auth_link(cursor, user_id, bot_id)
        raise NotAuthenticatedError
    #for (start_datetime, end_datetime) in cursor:
    #    busydates.append([start_datetime, end_datetime])
    #cnx.close()
    return setBusyDates(calendarPattern, dates)
def setBusyDates(calendarPattern, dates):
    """Mark busy quarter-hour slots in a calendar pattern.

    Fixes over the original: the bare call ``strptime(...)`` was a
    NameError (this module only imports the ``datetime`` class), replaced
    by ``datetime.strptime``; the large commented-out "Humhub Edition"
    duplicate of the loop was removed.

    :param calendarPattern: 24x4 pattern (hour x quarter), 0 = free
    :type calendarPattern: list
    :param dates: mapping of appointment keys to dicts with ISO 'start'
                  and 'end' strings (Google calendar items)
    :type dates: dict
    :return: the pattern with busy slots set to 1 (mutated in place)
    :rtype: list
    """
    for appointment in dates:
        start = dates[appointment]['start']  # format: 2018-05-24T17:00:00
        end = dates[appointment]['end']
        start_datetime = datetime.strptime(start, "%Y-%m-%dT%H:%M:%S")
        end_datetime = datetime.strptime(end, "%Y-%m-%dT%H:%M:%S")
        # starting slot: round the minute down to the previous quarter
        startIndex = int(float(start_datetime.minute) / 15.)
        # ending slot: round the minute up to the next quarter
        endIndex = int(math.ceil(float(end_datetime.minute) / 15.))
        endAtZero = False
        if endIndex == 0:
            endAtZero = True
        else:
            endIndex -= 1  # correct index for all cases except 0
        # mark every quarter between start and end as busy (= 1)
        for i in range(start_datetime.hour, end_datetime.hour + 1):
            if start_datetime.hour == end_datetime.hour:
                for j in range(startIndex, endIndex + 1):
                    calendarPattern[i][j] = 1
                break
            # three cases: i = start.hour, i = end.hour or in between
            if i == start_datetime.hour:
                # only mark from startIndex to the end of the hour
                for j in range(startIndex, 4):
                    calendarPattern[i][j] = 1
            elif i == end_datetime.hour:
                if endAtZero:
                    break
                # only mark from 0 to endIndex
                for j in range(endIndex + 1):
                    calendarPattern[i][j] = 1
            else:
                # whole hour busy
                for j in range(0, 4):
                    calendarPattern[i][j] = 1
    return calendarPattern
def createCalendarPattern(datefrom=None, dateto=None):
    """Build a 24x4 quarter-hour calendar pattern for one day.

    Without arguments every slot is free (0). With a datefrom/dateto pair
    (strings '%Y-%m-%dT%H:%M:%S.000Z') every slot is busy (1) except the
    slots inside the given time frame.

    Pattern layout: pattern[5][0] is 05:00, pattern[5][1] is 05:15,
    pattern[5][2] is 05:30, pattern[5][3] is 05:45, pattern[6][0] is 06:00
    and so on.

    :param datefrom: start of the free time frame, or None
    :type datefrom: str
    :param dateto: end of the free time frame, or None
    :type dateto: str
    :return: blank calendar pattern
    :rtype: list
    """
    if datefrom and dateto:
        # start fully busy, then punch the requested window free
        pattern = [[1, 1, 1, 1] for _ in range(24)]
        window_start = datetime.strptime(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')
        window_end = datetime.strptime(dateto, '%Y-%m-%dT%H:%M:%S.000Z')
        # round the start minute up and the end minute down to quarters
        first_quarter = int(math.ceil(float(window_start.minute) / 15.))
        last_quarter = int(float(window_end.minute) / 15.) - 1
        for hour in range(window_start.hour, window_end.hour + 1):
            if hour == window_start.hour:
                quarters = range(first_quarter, 4)
            elif hour == window_end.hour:
                quarters = range(last_quarter + 1)
            else:
                quarters = range(0, 4)
            for quarter in quarters:
                pattern[hour][quarter] = 0
        return pattern
    # no time frame given: the whole day is free
    return [[0, 0, 0, 0] for _ in range(24)]
def matchCalendars(calendars):
    """Merge several calendar patterns into one combined pattern.

    A slot is busy (1) in the result as soon as it is busy in any of the
    given calendars; ``None`` entries in the list are skipped.

    :param calendars: iterable of 24x4 patterns (entries may be None)
    :type calendars: list
    :return: merged pattern (1 = busy, 0 = free)
    :rtype: list
    """
    merged = [[0, 0, 0, 0] for _ in range(24)]
    for pattern in calendars:
        if pattern is None:
            continue
        for hour in range(24):
            for quarter in range(4):
                if pattern[hour][quarter] == 1:
                    merged[hour][quarter] = 1
    return merged
def getDateSuggestion(calendar,
                      duration,
                      timesSearched,
                      beginHour,
                      beginMinuteIndex,
                      endHour,
                      endHourIndex):
    """Search a merged calendar pattern for a free slot of a given length.

    :param calendar: 24x4 pattern to search (0 = free, 1 = busy)
    :type calendar: list
    :param duration: needed duration of the free slot in minutes
                     (0/None is treated as 15)
    :type duration: int
    :param timesSearched: number of earlier suggestions to skip, so that a
                          repeated search can offer the next alternative
    :type timesSearched: int
    :param beginHour: first hour of the search window
    :type beginHour: int
    :param beginMinuteIndex: first quarter index of the search window
    :type beginMinuteIndex: int
    :param endHour: last hour of the search window
    :type endHour: int
    :param endHourIndex: last quarter index of the search window
                         (unused in the body — kept for signature
                         compatibility)
    :type endHourIndex: int
    :return: [hour, minute] of a free slot, or the one-element list
             [timesSearched] with the remaining skip count when no slot
             was returned for this day
    :rtype: list
    """
    if duration == 0 or duration is None:
        duration = 15
    # transfer duration to quarter-hour indexes (rounded up)
    durationindezes = int(math.ceil(float(duration) / 15.))
    if timesSearched is None:
        timesSearched = 0
    # take the next slot where all required (duration) indexes are free
    # (free slots carry the value 0 in the pattern)
    for i in range(beginHour, endHour):
        if beginHour == i:
            rangej = beginMinuteIndex
        else:
            rangej = 0
        for j in range(rangej, 4):
            if calendar[i][j] == 0:
                founddate = True
                # check the following durationindezes - 1 quarters too
                for d in range(1, durationindezes):
                    n = j + d
                    if calendar[i + int(n / 4)][n % 4] == 1:
                        founddate = False
                        # set i and j to new values
                        # to skip already checked dates
                        # NOTE(review): rebinding the loop variables of a
                        # Python for-loop does NOT skip iterations — the
                        # next iteration restores them; slots may be
                        # re-examined. Confirm whether that is intended.
                        i = i + int(n / 4)
                        j = n % 4
                        break
                    else:
                        continue
                # hour = i, minute = j
                if founddate:
                    if timesSearched == 0:
                        return [i, j * 15]
                    else:
                        # skip this match and continue searching
                        # NOTE(review): same caveat as above — rebinding
                        # i/j here does not advance the loops.
                        i = i + int((durationindezes + j) / 4)
                        j = (durationindezes + j) % 4
                        timesSearched -= 1
    return [timesSearched]
def suggestDate(
        datefrom,
        dateto,
        duration,
        users,
        timesSearched,
        beginHour,
        beginMinuteIndex,
        endHour,
        endHourIndex,
        cnx
):
    """Search day by day for a free appointment shared by all users.

    For each day between ``datefrom`` and ``dateto`` the participants'
    calendars are fetched, merged with a blank day pattern and searched for
    a free slot of the requested duration via getDateSuggestion.

    Fixes over the original: advancing to the next day used ``timedelta``,
    which is never imported at module level (only the ``datetime`` class
    is), raising NameError on the first day without a match — a local
    import is added; the bare except was narrowed.

    :param str datefrom: start of the range ('%Y-%m-%dT%H:%M:%S.000Z')
    :param str dateto: end of the range (same format)
    :param int duration: required duration in minutes
    :param list users: Humhub user ids whose calendars must be free
    :param int timesSearched: number of earlier suggestions to skip
    :param int beginHour: first hour of the daily search window
    :param int beginMinuteIndex: first quarter index of the search window
    :param int endHour: last hour of the daily search window
    :param int endHourIndex: last quarter index of the search window
    :param cnx: cursor/connection handed through to getCalendar
    :return: [hour, minute] of a free slot, or [] when none was found
    :rtype: list
    :raises NotAuthenticatedError: when any user lacks Google authorization
    """
    from datetime import timedelta  # module header only imports datetime
    current_day = datetime.strptime(datefrom, '%Y-%m-%dT%H:%M:%S.000Z')
    last_day = datetime.strptime(dateto, '%Y-%m-%dT%H:%M:%S.000Z')
    while current_day < last_day:
        calendars = []
        authenticated = True
        for user in users:
            try:
                calendars.append(getCalendar(user, current_day, cnx))
            except Exception:
                # getCalendar already sent the auth link to this user
                authenticated = False
        if not authenticated:
            raise NotAuthenticatedError()
        # merge all user calendars with a blank day pattern
        calendars.append(createCalendarPattern())
        merged = matchCalendars(calendars)
        # hour and minute only; the caller combines them with the date
        suggestion = getDateSuggestion(
            merged,
            duration,
            timesSearched,
            beginHour,
            beginMinuteIndex,
            endHour,
            endHourIndex
        )
        if len(suggestion) == 1:
            # only the remaining skip counter came back: try the next day
            timesSearched = suggestion[0]
            current_day = current_day + timedelta(days=1)
        if len(suggestion) == 2:
            return suggestion
    return []
def getEndTime(datetime, duration):
    """Compute the end time of an appointment from start time and duration.

    The duration is rounded up to the next multiple of 15 minutes first.

    NOTE(review): the parameter is named ``datetime`` and shadows the
    datetime class inside this function; the name is kept so keyword
    callers keep working.

    :param datetime: starting datetime
    :type datetime: datetime.datetime
    :param duration: duration in minutes
    :type duration: int
    :return: end datetime
    :rtype: datetime.datetime
    """
    # round the duration up to a quarter hour
    duration = int(math.ceil(float(duration) / 15.)) * 15
    # divmod folds a minute overflow into the hour in one step
    carry_hours, end_minute = divmod(duration % 60 + datetime.minute, 60)
    end_hour = datetime.hour + int(duration / 60) + carry_hours
    return datetime.replace(hour=end_hour, minute=end_minute)
def getUserName(userID):
    """
    Gets users firstname and lastname by user_id and returns as string

    NOTE(review): when offlinemode is False this calls
    ``establishDBConnection(dbconfig)`` — neither name is defined in this
    module (the defined helper is connectToDB), so this path raises
    NameError as-is; confirm where these are meant to come from.

    :param userID: Humhub user id to look up
    :return: Full username ('' when no profile row matches)
    :rtype: str
    """
    firstname = ''
    lastname = ''
    global offlinemode
    if offlinemode:
        # fixed test fixture used while offline
        return "Christian Schmidt"
    # search in humhub db
    cnx = establishDBConnection(dbconfig)
    cursor = cnx.cursor()
    query = ("""SELECT firstname, lastname FROM profile WHERE user_id = {}
    """).format(userID)
    cursor.execute(query)
    username = ''
    for (firstname, lastname) in cursor:
        username = firstname + " " + lastname
    cnx.close()
    return username
def bookdate(cnx, datefrom, duration, users):
    """Book an appointment in the Humhub database for every participant.

    For each user this inserts a calendar_entry row plus the Humhub
    bookkeeping rows that make it visible: activity, participation,
    content, and user_follow records. Statement order matters because
    later inserts reference lastrowid values of earlier ones.

    :param cnx: open MySQL connection
    :param datefrom: start datetime of the appointment
    :param duration: duration in minutes
    :param users: list of Humhub user ids taking part
    :return: empty list (the callers only check for truthiness)
    :rtype: list
    """
    # create calendar entry, duration in minutes
    cursor = cnx.cursor()
    datetimeNow = "'" + datetime.now().strftime("%Y-%m-%d %H:%M:%S") + "'"
    # derive the end datetime; carry overflowing minutes into the hour
    if (datefrom.minute + duration) >= 60:
        dateto = datefrom.replace(
            hour=datefrom.hour + int((datefrom.minute + duration) / 60),
            minute=int(datefrom.minute + duration) % 60
        )
    else:
        dateto = datefrom.replace(minute=datefrom.minute + duration)
    # create one calendar entry for each user
    for user in users:
        # get user names for description except own id
        description = 'Termin mit '
        for user2 in users:
            if user is not user2:
                description += getUserName(user2) + ", "
        description = description[:-2]
        # get container id (the user's own content container)
        query = ("""SELECT `id` AS `cID` FROM `contentcontainer` WHERE
            `class` = 'humhub\\\\modules\\\\user\\\\models\\\\User' AND
            `pk` = %s AND `owner_user_id` = %s""")
        data = (user, user)
        cursor.execute(query, data)
        for cID in cursor:
            containerID = cID[0]
        # create entry
        # NOTE(review): description/datetimes are embedded via str.format,
        # not parameterized like the surrounding statements — confirm the
        # inputs are trusted.
        query = (("""INSERT INTO calendar_entry(title, description,
            start_datetime, end_datetime, all_day, participation_mode,
            color, allow_decline, allow_maybe, time_zone,
            participant_info, closed) VALUES
            ('Termin', '{}', {}, {}, 0, 2, '#59d6e4', 1, 1,
            'Europe/Berlin', '', 0);""").format(
            description,
            str("'" + datefrom.strftime("%Y-%m-%d %H:%M:%S") + "'"),
            str("'" + dateto.strftime("%Y-%m-%d %H:%M:%S") + "'")))
        cursor.execute(query)
        cnx.commit()
        # get id of entry created
        calendarEntryID = cursor.lastrowid
        # insert activity (ContentCreated)
        query = ("""INSERT INTO `activity`
            (`class`, `module`, `object_model`, `object_id`)
            VALUES (%s, 'content', %s, %s);""")
        data = ('humhub\\modules\\content\\activities\\ContentCreated',
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID)
        cursor.execute(query, data)
        cnx.commit()
        # insert participation (state 3 = attending)
        query = (("""INSERT INTO calendar_entry_participant
            (calendar_entry_id, user_id, participation_state)
            VALUES ({}, {}, 3);""").format(calendarEntryID, user))
        cursor.execute(query)
        cnx.commit()
        # content record making the calendar entry visible in the stream
        query = ("""INSERT INTO `content`
            (`guid`, `object_model`, `object_id`, `visibility`, `pinned`,
            `archived`, `created_at`, `created_by`, `updated_at`,
            `updated_by`, `contentcontainer_id`, `stream_sort_date`,
            `stream_channel`) VALUES
            (%s, %s, %s, 1, 0, '0', %s, 5, %s, 5, %s, %s, 'default');""")
        data = (buildGUID(cnx),
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID,
                datetimeNow,
                datetimeNow,
                containerID,
                datetimeNow)
        cursor.execute(query, data)
        cnx.commit()
        # let the user follow the entry so notifications are sent
        query = (("""INSERT INTO user_follow
            (object_model, object_id, user_id, send_notifications)
            VALUES (%s, %s, %s, 1);"""))
        data = ('humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID,
                user)
        cursor.execute(query, data)
        cnx.commit()
        # second activity (ResponseAttend) plus its content record
        query = ("""INSERT INTO `activity`
            (`class`, `module`, `object_model`, `object_id`)
            VALUES (%s, 'calendar', %s, %s);""")
        data = ('humhub\\modules\\calendar\\activities\\ResponseAttend',
                'humhub\\modules\\calendar\\models\\CalendarEntry',
                calendarEntryID)
        cursor.execute(query, data)
        cnx.commit()
        activityID = cursor.lastrowid
        query = ("""INSERT INTO `content`
            (`guid`, `object_model`, `object_id`, `visibility`,
            `pinned`, `archived`, `created_at`, `created_by`,
            `updated_at`, `updated_by`, `contentcontainer_id`,
            `stream_sort_date`, `stream_channel`) VALUES
            (%s, %s, %s, 1, 0, '0', %s, 5, %s, 5, 5, %s,
            'activity');""")
        data = (buildGUID(cnx),
                'humhub\\modules\\activity\\models\\Activity',
                activityID,
                datetimeNow,
                datetimeNow,
                datetimeNow)
        cursor.execute(query, data)
        cnx.commit()
    return []
def buildGUID(cnx):
    """Create a random GUID that is unused in the Humhub content table.

    The value is returned wrapped in single quotes, ready to be embedded
    into an SQL statement (matching how callers in this module use it).

    The original assembled a version-4 UUID by hand with
    ``os.urandom(...).encode('hex')``, which only exists on Python 2;
    stdlib ``uuid.uuid4()`` produces exactly such a UUID on any version.

    :param cnx: open MySQL connection
    :return: quoted GUID string, e.g. "'1b4e28ba-2fa1-41d2-883f-0016d3cca427'"
    :rtype: str
    """
    import uuid  # stdlib; imported locally to keep the module header as-is
    while True:
        hexstr = "'" + str(uuid.uuid4()) + "'"
        # check if GUID is already used
        cursor = cnx.cursor(buffered=True)
        query = "SELECT id FROM `content` WHERE guid = {}".format(hexstr)
        cursor.execute(query)
        if cursor.rowcount == 0:
            return hexstr
def searchCompetence(search, dictionary):
    """
    Returns the path for a competence from general competence to searched
    competence.

    Matching compares stemmed, lower-cased forms (German Snowball stemmer)
    of the competence name and of every synonym. The returned list is
    built innermost-first by the recursion, i.e. [searched, ..., general].

    :param search: competence term to look for
    :param dictionary: list of competence dicts, optionally with
                       'synonyms' and nested 'subcategories'
    :return: path from the matched competence up to its top category
    :rtype: list
    :raises ValueError: when the competence occurs nowhere in the tree
    """
    for competence in dictionary:
        if (
            (
                'competence' in competence and
                stemmer.stem(competence['competence'])
                == stemmer.stem(search.lower())
            )
            or
            (
                'synonyms' in competence and
                stemmer.stem(search.lower()) in [
                    stemmer.stem(syn) for syn in competence['synonyms']
                ]
            )
        ):
            return [competence['competence']]
        else:
            try:
                # recurse into subcategories; a ValueError from the
                # recursion means "not found below here" and lets the
                # loop continue with the next sibling
                if 'subcategories' in competence:
                    return (
                        searchCompetence(search, competence['subcategories']) +
                        [competence['competence']])
            except ValueError:
                pass
    raise ValueError("Not found")
def getUserCompetencies(cnx, exceptUserIDs):
    """Map user display names to their competence lists.

    Reads the Humhub profile table, skipping the given user ids and rows
    without competences. Competence strings are comma separated in the
    database and are normalized to stripped lower case.

    Fixes over the original: the passed-in connection is used instead of
    the undefined ``establishDBConnection``/``dbconfig`` globals (which
    raised NameError), and the connection is closed only after the cursor
    has been consumed — it was closed *before* iterating, invalidating
    the cursor.

    :param cnx: open MySQL connection
    :param exceptUserIDs: iterable of Humhub user ids to exclude
    :return: mapping 'Firstname Lastname' -> list of competences
    :rtype: dict
    """
    competencies = {}
    cursor = cnx.cursor()
    placeholder = '%s'
    placeholders = ', '.join(placeholder for unused in exceptUserIDs)
    query = ("""SELECT firstname, lastname, competence FROM profile
             WHERE user_id NOT IN ({}) AND competence IS NOT NULL
             """).format(placeholders)
    cursor.execute(query, tuple(exceptUserIDs))
    for (firstname, lastname, competence) in cursor:
        competencies[firstname + " " + lastname] = (
            [comp.strip().lower() for comp in competence.split(',')]
        )
    cnx.close()
    return competencies
def getUsersWithCompetencies(categories, usercompetencies):
    """Find users matching the most specific of the wanted competences.

    Inverts the user -> competences mapping into competence -> users, then
    returns the first category (ordered from special to general) for which
    at least one user exists.

    :param categories: competence names ordered from special to general
    :type categories: list
    :param usercompetencies: mapping of user name -> list of competences
    :type usercompetencies: dict
    :return: dict with 'competence' and 'users' keys, or None if no match
    """
    by_competence = {}
    for user, competence_list in usercompetencies.items():
        for competence in competence_list:
            by_competence.setdefault(competence, []).append(user)
    for wanted in categories:
        if wanted in by_competence:
            # users matching the requested competence
            return {
                "competence": wanted,
                "users": by_competence[wanted]
            }
    return None
def getMatchingCompetence(dictionary, lastmessage):
    """
    Searches for a competence in a string

    Splits the message on spaces and sentence punctuation and collects
    every word whose stemmed, lower-cased form matches any stemmed
    competence name or synonym from the dictionary.

    :param dictionary: competence dictionary (see getAllCompetences)
    :param lastmessage: user message to scan
    :return: list of matched words, lower-cased
    :rtype: list
    """
    allCompetences = getAllCompetences(dictionary)
    searchedCompetence = []
    for word in re.split('[ .!?]', lastmessage):
        if stemmer.stem(word.strip().lower()) in [
                stemmer.stem(comp) for comp in allCompetences]:
            searchedCompetence.append(word.strip().lower())
    return searchedCompetence
def getAllCompetences(dictionary, competences=None):
    """Flatten a competence dictionary into one list of names and synonyms.

    Walks the (possibly nested) competence structure and collects every
    'competence' value and every synonym, without hierarchy.

    Fix over the original: the accumulator defaulted to a shared mutable
    list (``competences=[]``), so results leaked between calls — every
    invocation appended to the same list. It is created per call now;
    the recursion still passes the accumulator explicitly.

    :param dictionary: list of competence dicts (may contain 'synonyms'
                       and nested 'subcategories')
    :param competences: internal accumulator used by the recursion
    :return: flat list of competence names and synonyms
    :rtype: list
    """
    if competences is None:
        competences = []
    for competence in dictionary:
        competences.append(competence['competence'])
        if 'synonyms' in competence:
            for synonym in competence['synonyms']:
                competences.append(synonym)
        if 'subcategories' in competence:
            getAllCompetences(competence['subcategories'], competences)
    return competences
def getUserID(person):
    """
    Gets Humhub User ID using name information

    NOTE(review): when offlinemode is False this calls
    ``establishDBConnection(dbconfig)`` — neither name is defined in this
    module, so the online path raises NameError as-is. Also, rows iterate
    as tuples elsewhere in this module (cf. getBotID), so this likely
    returns a 1-tuple rather than an int, and ``userid`` is unbound when
    no row matches — verify against callers.

    :param person: Name of the person to get the Humhub User ID for
    :type person: str.
    """
    # search for person string in humhub db
    # switch case for only one name (propably lastname) or
    # two separate strings (firstname + lastname)
    firstname = ''
    lastname = ''
    if len(person.split()) == 1:
        # only lastname
        lastname = person
    else:
        firstname = person.split()[0]
        lastname = person.split()[1]
    global offlinemode
    if offlinemode:
        # fixed test fixture used while offline
        return 8
    # search in humhub db
    cnx = establishDBConnection(dbconfig)
    cursor = cnx.cursor()
    query = ''
    if firstname == '':
        query = ("""SELECT user_id FROM profile WHERE lastname = {}
        """).format(lastname)
    else:
        query = ("""SELECT user_id FROM profile WHERE firstname = {}
        AND lastname = {}
        """).format(firstname, lastname)
    cursor.execute(query)
    for user_id in cursor:
        userid = user_id
    cnx.close()
    return userid
| 34.952954 | 171 | 0.588099 | 445 | 0.013929 | 0 | 0 | 0 | 0 | 0 | 0 | 15,174 | 0.474974 |
a60efe42e9f5e8c55bf82adbc77eeb0fe033f6f5 | 857 | py | Python | tf_pose/test.py | Artia-Inspirenet/module-2 | 7cf1d74f13d23a11ce202436d88b283d7ef1e109 | [
"Apache-2.0"
] | null | null | null | tf_pose/test.py | Artia-Inspirenet/module-2 | 7cf1d74f13d23a11ce202436d88b283d7ef1e109 | [
"Apache-2.0"
] | null | null | null | tf_pose/test.py | Artia-Inspirenet/module-2 | 7cf1d74f13d23a11ce202436d88b283d7ef1e109 | [
"Apache-2.0"
] | null | null | null | from pycocotools.coco import COCO
import json
with open('person_keypoints_val2017.json') as f:
    data = json.load(f)  # raw COCO keypoint annotation file as a dict
# same file, parsed through the pycocotools index for id-based lookups
coco = COCO('person_keypoints_val2017.json')
def search(id):
    # Print every annotation that belongs to the given image id.
    for annotation in data['annotations']:
        if annotation['image_id']==id:
            print(annotation)
#print(data['images'])
# Inspect the first image in the index and all of its annotations.
keys = list(coco.imgs.keys())
img_idx = coco.imgs[keys[0]]['id']
print(img_idx)
ann_idx = coco.getAnnIds(imgIds=img_idx)
print(ann_idx[0].__class__)
annotations = coco.loadAnns(ann_idx)
for ann in annotations:
    print(ann.get('num_keypoints', 0))  # 0 when the key is absent
# image_ids = {}
# for anno in data['annotations']:
# if anno['image_id'] in image_ids.keys():
# image_ids[anno['image_id']] += 1
# else:
# image_ids[anno['image_id']] = 0
# for i in image_ids:
# if image_ids[i] > 1:
# print(i)
# search(8021) | 20.902439 | 48 | 0.649942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 388 | 0.452742 |
a60f536104bc0bea65187e1ff26469082c3a5ca1 | 10,344 | py | Python | py/pysparkling/ml/algo.py | gerbenoostra/sparkling-water | 0e996b80124bf6cf4bfb2cd274625f3ddb7bd9fb | [
"Apache-2.0"
] | null | null | null | py/pysparkling/ml/algo.py | gerbenoostra/sparkling-water | 0e996b80124bf6cf4bfb2cd274625f3ddb7bd9fb | [
"Apache-2.0"
] | null | null | null | py/pysparkling/ml/algo.py | gerbenoostra/sparkling-water | 0e996b80124bf6cf4bfb2cd274625f3ddb7bd9fb | [
"Apache-2.0"
] | null | null | null | from pyspark import since, keyword_only
from pyspark.ml.param.shared import *
from pyspark.ml.util import JavaMLReadable, JavaMLWritable
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaTransformer, _jvm
from pyspark.sql import SparkSession
from pysparkling import *
from .params import H2OGBMParams, H2ODeepLearningParams, H2OAutoMLParams
java_max_double_value = (2-2**(-52))*(2**1023)
def set_double_values(kwargs, values):
    """Coerce the listed keys of *kwargs* to float, in place.

    py4j infers the Java parameter type from the Python value, so passing an
    int where a Java double is expected causes a class-cast exception; keys
    named in *values* but missing from *kwargs* are skipped.
    """
    for key in filter(kwargs.__contains__, values):
        kwargs[key] = float(kwargs[key])
class H2OGBM(JavaEstimator, H2OGBMParams, JavaMLReadable, JavaMLWritable):
    """Spark ML estimator wrapping H2O's Gradient Boosting Machine.

    Enum-valued parameters (``distribution``, ``histogramType``) are taken
    as strings and converted to the corresponding JVM enum values in
    :meth:`setParams`; double-valued parameters are coerced to float so
    py4j does not mistype them as Java ints.
    """
    @keyword_only
    def __init__(self, ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True, nfolds=0,
                 keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                 seed=-1, distribution="AUTO", ntrees=50, maxDepth=5, minRows=10.0, nbins=20, nbinsCats=1024, minSplitImprovement=1e-5,
                 histogramType="AUTO", r2Stopping=java_max_double_value,
                 nbinsTopLevel=1<<10, buildTreeOneNode=False, scoreTreeInterval=0,
                 sampleRate=1.0, sampleRatePerClass=None, colSampleRateChangePerLevel=1.0, colSampleRatePerTree=1.0,
                 learnRate=0.1, learnRateAnnealing=1.0, colSampleRate=1.0, maxAbsLeafnodePred=java_max_double_value,
                 predNoiseBandwidth=0.0, convertUnknownCategoricalLevelsToNa=False):
        super(H2OGBM, self).__init__()
        self._hc = H2OContext.getOrCreate(SparkSession.builder.getOrCreate(), verbose=False)
        # JVM-side estimator object backing this Python wrapper
        self._java_obj = self._new_java_obj("org.apache.spark.ml.h2o.algos.H2OGBM",
                                            self.uid,
                                            self._hc._jhc.h2oContext(),
                                            self._hc._jsql_context)
        self._setDefault(ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True,
                         nfolds=0, keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                         seed=-1, distribution=self._hc._jvm.hex.genmodel.utils.DistributionFamily.valueOf("AUTO"),
                         ntrees=50, maxDepth=5, minRows=10.0, nbins=20, nbinsCats=1024, minSplitImprovement=1e-5,
                         histogramType=self._hc._jvm.hex.tree.SharedTreeModel.SharedTreeParameters.HistogramType.valueOf("AUTO"),
                         r2Stopping=self._hc._jvm.Double.MAX_VALUE, nbinsTopLevel=1<<10, buildTreeOneNode=False, scoreTreeInterval=0,
                         sampleRate=1.0, sampleRatePerClass=None, colSampleRateChangePerLevel=1.0, colSampleRatePerTree=1.0,
                         learnRate=0.1, learnRateAnnealing=1.0, colSampleRate=1.0, maxAbsLeafnodePred=self._hc._jvm.Double.MAX_VALUE,
                         predNoiseBandwidth=0.0, convertUnknownCategoricalLevelsToNa=False)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    @keyword_only
    def setParams(self, ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True,
                  nfolds=0, keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                  seed=-1, distribution="AUTO", ntrees=50, maxDepth=5, minRows=10.0, nbins=20, nbinsCats=1024, minSplitImprovement=1e-5,
                  histogramType="AUTO", r2Stopping=java_max_double_value,
                  nbinsTopLevel=1<<10, buildTreeOneNode=False, scoreTreeInterval=0,
                  sampleRate=1.0, sampleRatePerClass=None, colSampleRateChangePerLevel=1.0, colSampleRatePerTree=1.0,
                  learnRate=0.1, learnRateAnnealing=1.0, colSampleRate=1.0, maxAbsLeafnodePred=java_max_double_value,
                  predNoiseBandwidth=0.0, convertUnknownCategoricalLevelsToNa=False):
        """Set parameters, converting string enums and numeric doubles for the JVM."""
        kwargs = self._input_kwargs
        if "distribution" in kwargs:
            kwargs["distribution"] = self._hc._jvm.hex.genmodel.utils.DistributionFamily.valueOf(kwargs["distribution"])
        if "histogramType" in kwargs:
            kwargs["histogramType"] = self._hc._jvm.hex.tree.SharedTreeModel.SharedTreeParameters.HistogramType.valueOf(kwargs["histogramType"])
        # we need to convert double arguments manually to floats as if we assign integer to double, py4j thinks that
        # the whole type is actually int and we get class cast exception
        # BUGFIX: a missing comma previously fused "maxAbsLeafnodePred" and
        # "minSplitImprovement" into one string, so neither was converted.
        double_types = ["minRows", "predNoiseBandwidth", "ratio", "learnRate", "colSampleRate", "learnRateAnnealing", "maxAbsLeafnodePred",
                        "minSplitImprovement", "r2Stopping", "sampleRate", "colSampleRateChangePerLevel", "colSampleRatePerTree"]
        set_double_values(kwargs, double_types)
        # We need to also map all doubles in the arrays
        if "sampleRatePerClass" in kwargs:
            kwargs["sampleRatePerClass"] = map(float, kwargs["sampleRatePerClass"])
        return self._set(**kwargs)
    def _create_model(self, java_model):
        # Wrap the fitted JVM model in its Python counterpart.
        return H2OGBMModel(java_model)
class H2OGBMModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """Fitted model produced by :class:`H2OGBM`; behavior comes from the JVM side."""
    pass
class H2ODeepLearning(JavaEstimator, H2ODeepLearningParams, JavaMLReadable, JavaMLWritable):
    """Spark ML estimator wrapping H2O Deep Learning.

    ``distribution`` is accepted as a string and mapped to the JVM enum in
    :meth:`setParams`; double-valued parameters are coerced to float so py4j
    does not mistype them as Java ints.
    """
    @keyword_only
    def __init__(self, ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True,
                 nfolds=0, keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                 seed=-1, distribution="AUTO", epochs=10.0, l1=0.0, l2=0.0, hidden=[200,200], reproducible=False,
                 convertUnknownCategoricalLevelsToNa=False):
        super(H2ODeepLearning, self).__init__()
        self._hc = H2OContext.getOrCreate(SparkSession.builder.getOrCreate(), verbose=False)
        # JVM-side estimator object backing this Python wrapper
        self._java_obj = self._new_java_obj("org.apache.spark.ml.h2o.algos.H2ODeepLearning",
                                            self.uid,
                                            self._hc._jhc.h2oContext(),
                                            self._hc._jsql_context)
        self._setDefault(ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True,
                         nfolds=0, keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                         seed=-1, distribution=self._hc._jvm.hex.genmodel.utils.DistributionFamily.valueOf("AUTO"),
                         epochs=10.0, l1=0.0, l2=0.0, hidden=[200,200], reproducible=False, convertUnknownCategoricalLevelsToNa=False)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    @keyword_only
    def setParams(self, ratio=1.0, predictionCol=None, featuresCols=[], allStringColumnsToCategorical=True,
                  nfolds=0, keepCrossValidationPredictions=False, keepCrossValidationFoldAssignment=False, parallelizeCrossValidation=True,
                  seed=-1, distribution="AUTO", epochs=10.0, l1=0.0, l2=0.0, hidden=[200,200], reproducible=False, convertUnknownCategoricalLevelsToNa=False):
        """Set parameters, converting the distribution string and double values for the JVM."""
        kwargs = self._input_kwargs
        if "distribution" in kwargs:
            kwargs["distribution"] = self._hc._jvm.hex.genmodel.utils.DistributionFamily.valueOf(kwargs["distribution"])
        # we need to convert double arguments manually to floats as if we assign integer to double, py4j thinks that
        # the whole type is actually int and we get class cast exception
        double_types = ["ratio", "epochs", "l1", "l2"]
        set_double_values(kwargs, double_types)
        return self._set(**kwargs)
    def _create_model(self, java_model):
        # Wrap the fitted JVM model in its Python counterpart.
        return H2ODeepLearningModel(java_model)
class H2ODeepLearningModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """Fitted model produced by :class:`H2ODeepLearning`; behavior comes from the JVM side."""
    pass
class H2OAutoML(JavaEstimator, H2OAutoMLParams, JavaMLWritable, JavaMLReadable):
    """Spark ML estimator wrapping H2O AutoML.

    ``stoppingMetric`` is accepted as a string and mapped to the JVM enum in
    :meth:`setParams`; double-valued parameters are coerced to float so py4j
    does not mistype them as Java ints.
    """
    @keyword_only
    def __init__(self, predictionCol=None, allStringColumnsToCategorical=True, ratio=1.0, foldColumn=None, weightsColumn=None,
                 ignoredColumns=[], tryMutations=True, excludeAlgos=None, projectName=None, loss="AUTO", maxRuntimeSecs=3600.0, stoppingRounds=3,
                 stoppingTolerance=0.001, stoppingMetric="AUTO", nfolds=5, convertUnknownCategoricalLevelsToNa=False):
        super(H2OAutoML, self).__init__()
        self._hc = H2OContext.getOrCreate(SparkSession.builder.getOrCreate(), verbose=False)
        # JVM-side estimator object backing this Python wrapper
        self._java_obj = self._new_java_obj("org.apache.spark.ml.h2o.algos.H2OAutoML",
                                            self.uid,
                                            self._hc._jhc.h2oContext(),
                                            self._hc._jsql_context)
        self._setDefault(predictionCol=None, allStringColumnsToCategorical=True, ratio=1.0, foldColumn=None, weightsColumn=None,
                         ignoredColumns=[], tryMutations=True, excludeAlgos=None, projectName=None, loss="AUTO", maxRuntimeSecs=3600.0, stoppingRounds=3,
                         stoppingTolerance=0.001, stoppingMetric=self._hc._jvm.hex.ScoreKeeper.StoppingMetric.valueOf("AUTO"), nfolds=5, convertUnknownCategoricalLevelsToNa=False)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    @keyword_only
    def setParams(self, predictionCol=None, allStringColumnsToCategorical=True, ratio=1.0, foldColumn=None, weightsColumn=None,
                  ignoredColumns=[], tryMutations=True, excludeAlgos=None, projectName=None, loss="AUTO", maxRuntimeSecs=3600.0, stoppingRounds=3,
                  stoppingTolerance=0.001, stoppingMetric="AUTO", nfolds=5, convertUnknownCategoricalLevelsToNa=False):
        """Set parameters, converting the stopping metric string and double values for the JVM."""
        kwargs = self._input_kwargs
        if "stoppingMetric" in kwargs:
            kwargs["stoppingMetric"] = self._hc._jvm.hex.ScoreKeeper.StoppingMetric.valueOf(kwargs["stoppingMetric"])
        # we need to convert double arguments manually to floats as if we assign integer to double, py4j thinks that
        double_types = ["maxRuntimeSecs", "stoppingTolerance", "ratio"]
        set_double_values(kwargs, double_types)
        return self._set(**kwargs)
    def _create_model(self, java_model):
        # Wrap the fitted JVM model in its Python counterpart.
        return H2OAutoMLModel(java_model)
class H2OAutoMLModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """Fitted model produced by :class:`H2OAutoML`; behavior comes from the JVM side."""
    pass
a60f692d9e581d0d010585ae2ed188489a8ee8ec | 497 | py | Python | bonds/users/templatetags/email_extras.py | ghostforpy/bonds-docker | fda77225b85264cb4ba06b15ff63bc807858425a | [
"MIT"
] | 2 | 2020-09-08T12:51:56.000Z | 2021-08-18T15:27:52.000Z | bonds/users/templatetags/email_extras.py | ghostforpy/bonds-docker | fda77225b85264cb4ba06b15ff63bc807858425a | [
"MIT"
] | 1 | 2021-12-13T20:43:35.000Z | 2021-12-13T20:43:35.000Z | bonds/users/templatetags/email_extras.py | ghostforpy/bonds-docker | fda77225b85264cb4ba06b15ff63bc807858425a | [
"MIT"
] | null | null | null | from django import template
# from ..models import *
# from bonds.friends.models import UserFriends
# from django.core.exceptions import ObjectDoesNotExist
# Template-tag library instance; the filters below register themselves on it.
register = template.Library()
@register.filter(name='return_number_with_sign')
def return_number_with_sign(num):
    """Format a number with an explicit sign and two decimals, e.g. ``+1.25`` / ``-0.50``."""
    return format(float(num), "+.2f")
@register.filter(name='return_color')
def return_color(i):
    """Map a numeric value to a color name: negative -> 'red', positive -> 'green', zero -> 'gray'."""
    value = float(i)
    if value < 0:
        return 'red'
    if value > 0:
        return 'green'
    return 'gray'
| 21.608696 | 55 | 0.684105 | 0 | 0 | 0 | 0 | 304 | 0.61167 | 0 | 0 | 192 | 0.386318 |
a61004468602027708717a182ff1b7708af746b8 | 1,907 | py | Python | mongox/fields.py | Collector0/mongox | 40fc6b7076c959e7bf8c001527584891fca8631c | [
"MIT"
] | 1 | 2021-11-13T15:03:04.000Z | 2021-11-13T15:03:04.000Z | mongox/fields.py | ischaojie/mongox | 29e1dc3cb5f34190f6678c727132776068a09655 | [
"MIT"
] | null | null | null | mongox/fields.py | ischaojie/mongox | 29e1dc3cb5f34190f6678c727132776068a09655 | [
"MIT"
] | null | null | null | import typing
import bson
from pydantic import Field
from pydantic.fields import ModelField as PydanticModelField
__all__ = ["Field", "ObjectId"]
class ObjectId(bson.ObjectId):
    """
    Pydantic ObjectId field with validators.

    Subclasses ``bson.ObjectId`` so model fields of this type validate and
    coerce their input transparently.
    """
    @classmethod
    def __get_validators__(cls) -> typing.Generator[bson.ObjectId, None, None]:
        # Pydantic hook: yield the validator callables to run for this type.
        yield cls.validate
    @classmethod
    def validate(cls, v: typing.Any) -> bson.ObjectId:
        # Reject anything bson does not consider a valid ObjectId.
        if not bson.ObjectId.is_valid(v):
            raise ValueError("Invalid ObjectId")
        return bson.ObjectId(v)
    @classmethod
    def __modify_schema__(cls, field_schema: dict) -> None:
        # Expose the field as a plain string in the generated JSON schema.
        field_schema.update(type="string")
class ModelField(PydanticModelField):
    """
    Custom ModelField whose comparison operators build MongoDB query
    fragments, e.g. ``field > 5`` produces ``{"field": {"$gt": 5}}``.
    """

    __slots__: typing.Tuple[str, ...] = tuple()

    def _filter(
        self, mongo_op: str, operand: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        # Shared builder used by every comparison operator below.
        return {self.name: {mongo_op: operand}}

    def __lt__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._filter("$lt", other)

    def __le__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._filter("$lte", other)

    def __eq__(  # type: ignore[override]
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        # Using $eq instead of a bare value so regex operands also work.
        return self._filter("$eq", other)

    def __ne__(  # type: ignore[override]
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._filter("$ne", other)

    def __gt__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._filter("$gt", other)

    def __ge__(
        self, other: typing.Any
    ) -> typing.Dict[str, typing.Dict[str, typing.Any]]:
        return self._filter("$gte", other)

    def __hash__(self) -> int:
        return super().__hash__()
a61085775c1d0af520f40b38412e987086bb99a9 | 1,007 | py | Python | cogs/System.py | Naman-Biswajit/Discord-Bot-Perceus | 338826e085653e3cd4f972bfb156a7ca129c1f49 | [
"MIT"
] | null | null | null | cogs/System.py | Naman-Biswajit/Discord-Bot-Perceus | 338826e085653e3cd4f972bfb156a7ca129c1f49 | [
"MIT"
] | null | null | null | cogs/System.py | Naman-Biswajit/Discord-Bot-Perceus | 338826e085653e3cd4f972bfb156a7ca129c1f49 | [
"MIT"
] | null | null | null | import discord
from discord.ext import commands
class System(commands.Cog):
    """Maintenance cog: commands for loading, unloading and reloading other cogs."""
    def __init__(self, perceus):
        self.perceus = perceus  # the bot instance this cog operates on
    @commands.Cog.listener()
    async def on_ready(self):
        """Log the bot's identity once it has connected."""
        print('Logged in as: ')
        print(self.perceus.user.name)
        print(self.perceus.user.id)
        print('--------------')
    @commands.command()
    async def load(self, ctx, cogname):
        """Load the extension ``cogs.<cogname>`` and confirm in chat."""
        self.perceus.load_extension(f'cogs.{cogname}')
        await ctx.send(f'Loaded {cogname}')
    @commands.command()
    async def unload(self, ctx, cogname):
        """Unload an extension, refusing to unload this System cog itself."""
        if cogname in ['System']:
            return await ctx.send('You cannot unload system')
        self.perceus.unload_extension(f'cogs.{cogname}')
        await ctx.send(f'Unloaded {cogname}')
    @commands.command()
    async def reload(self, ctx, cogname):
        """Reload the extension ``cogs.<cogname>`` in place."""
        self.perceus.reload_extension(f'cogs.{cogname}')
        await ctx.send(f'Reloaded {cogname}')
def setup(perceus):
    """Extension entry point: attach the System cog to the bot."""
    cog = System(perceus)
    perceus.add_cog(cog)
a611c23032426651706ecd95f602bc143309b886 | 3,586 | py | Python | command.py | floydawong/tomato_time | 0fc16a201ea4c3816499a998eaa72d43958c9f2a | [
"MIT"
] | null | null | null | command.py | floydawong/tomato_time | 0fc16a201ea4c3816499a998eaa72d43958c9f2a | [
"MIT"
] | null | null | null | command.py | floydawong/tomato_time | 0fc16a201ea4c3816499a998eaa72d43958c9f2a | [
"MIT"
] | null | null | null | import sublime_plugin
from .tomato_time import get_tomato
class CreateTomatoCommand(sublime_plugin.TextCommand):
    """Start a new tomato: pick (or manage) a tag, enter a description, then start the timer."""
    def show_desc_panel(self):
        """Prompt for a description; on confirm, apply self.tag and start the tomato."""
        window = self.view.window()
        caption = "Tomato Time Description:"
        def on_done(desc):
            self.tomato.set_desc(desc)
            self.tomato.set_tag(self.tag)
            self.tomato.start()
        # pre-fill with the previous description
        window.show_input_panel(caption, self.tomato.get_desc(), on_done, None, None)
    def create_tag(self):
        """Prompt for a new tag name, create it, then return to the tag menu."""
        window = self.view.window()
        caption = "New Tag's Name:"
        def on_done(name):
            self.tomato.create_tag(name)
            self.show_tags_panel()
        window.show_input_panel(caption, "", on_done, None, None)
    def delete_tag(self):
        """List existing tags and delete the one the user selects."""
        window = self.view.window()
        items = []
        tags = self.tomato.get_tags()
        if len(tags) == 0:
            # nothing to delete; fall back to the main menu
            self.show_tags_panel()
            return
        for t in tags:
            items.append(": %s" % t)
        def on_select(index):
            if index < 0:
                # panel dismissed without a choice
                return
            self.tomato.delete_tag(tags[index])
            self.show_tags_panel()
        window.show_quick_panel(items, on_select)
    def show_tags_panel(self):
        """Main menu: indices 0-3 are fixed actions, index >= 4 selects a tag."""
        window = self.view.window()
        items = []
        tag = self.tomato.get_tag()
        desc = self.tomato.get_desc()
        if tag:
            items.append("Go on with last tomato: [%s] %s" % (tag, desc))
        else:
            items.append("Go on with last tomato: %s" % (desc))
        items.append("Discard Tag")
        items.append("Create Tag")
        items.append("Delete Tag")
        tags = self.tomato.get_tags()
        for t in tags:
            items.append(": %s" % t)
        def on_select(index):
            if index < 0:
                return
            if index == 0:
                # reuse the previous tag
                self.tag = self.tomato.get_tag()
                self.show_desc_panel()
                return
            if index == 1:
                # start without a tag
                self.tag = None
                self.show_desc_panel()
                return
            if index == 2:
                self.create_tag()
                return
            if index == 3:
                self.delete_tag()
                return
            # offset by the four fixed menu entries above
            self.tag = tags[index - 4]
            self.show_desc_panel()
        window.show_quick_panel(items, on_select)
    def run(self, edit):
        """Command entry point: grab the tomato singleton and open the tag menu."""
        self.tomato = get_tomato()
        self.tag = None
        self.show_tags_panel()
class DiscardTomatoCommand(sublime_plugin.TextCommand):
    """Abort the tomato that is currently running."""
    def run(self, edit):
        get_tomato().discard()
    def is_visible(self):
        # Only meaningful while a tomato is active.
        return get_tomato().is_actived()
class ShowTomatoProgressCommand(sublime_plugin.TextCommand):
    """Turn the status-bar progress display on."""
    def run(self, edit):
        get_tomato().set_status_visiable(True)
    def is_visible(self):
        current = get_tomato()
        # Offered only while a tomato runs with its progress hidden.
        return current.is_actived() and not current.get_status_visiable()
class HideTomatoProgressCommand(sublime_plugin.TextCommand):
    """Turn the status-bar progress display off."""
    def run(self, edit):
        get_tomato().set_status_visiable(False)
    def is_visible(self):
        current = get_tomato()
        # Offered only while a tomato runs with its progress shown.
        return current.is_actived() and current.get_status_visiable()
class ShowCompleteRecordsCommand(sublime_plugin.TextCommand):
    """Display the log of completed tomatoes."""
    def run(self, edit):
        get_tomato().show_records()
class ClearCompleteRecordsCommand(sublime_plugin.TextCommand):
    """Erase the log of completed tomatoes."""
    def run(self, edit):
        get_tomato().clear_records()
| 27.374046 | 85 | 0.573341 | 3,510 | 0.978806 | 0 | 0 | 0 | 0 | 0 | 0 | 155 | 0.043224 |
a611cacdfe5dbb06f96220faa6f86ba176c65626 | 918 | py | Python | Pacote Download/Ex043_12_IMC.py | BrunoCruzIglesias/Python | 01465632a8471271e994eb4565a14a547db6578d | [
"MIT"
] | null | null | null | Pacote Download/Ex043_12_IMC.py | BrunoCruzIglesias/Python | 01465632a8471271e994eb4565a14a547db6578d | [
"MIT"
] | null | null | null | Pacote Download/Ex043_12_IMC.py | BrunoCruzIglesias/Python | 01465632a8471271e994eb4565a14a547db6578d | [
"MIT"
] | null | null | null | # Cálculo de IMC
# BMI calculator: reads name, weight and height, prints the BMI and its class.
# BUGFIX: the original ranges left gaps (e.g. "media <= 24.9" followed by
# "media >= 25.0"), so a BMI such as 24.95 matched no branch and fell through
# to the morbid-obesity message. Half-open ranges remove the gaps.
print("Vamos calcular seu IMC?")
nome = input("Digite o seu nome: ")
peso = float(input("Olá, {}, agora digite seu peso (em Kg, Ex: 68.9): " .format(nome)))
altura = float(input("Digite sua altura (em m, Ex: 1.80): "))
media = peso / (altura * altura)
print('Seu IMC é: {:.1f}'.format(media))
if media < 18.5:
    print("Abaixo do peso")
elif media < 25.0:
    print("Você está no seu peso ideal")
elif media < 30.0:
    print("Você está levemente acima do peso")
elif media < 35.0:
    print("Obesidade Grau 1")
elif media < 40.0:
    print("Obesidade Grau 2")
else:
    print("Obesidade grau 3(mórbida)")
a611d310e0340c190260eaa06e81db9198238521 | 399 | py | Python | 2020/day2/password.py | DanielKillenberger/AdventOfCode | e9b40c1ae09ee4bffbbf6acca1a2778aed5f1561 | [
"MIT"
] | null | null | null | 2020/day2/password.py | DanielKillenberger/AdventOfCode | e9b40c1ae09ee4bffbbf6acca1a2778aed5f1561 | [
"MIT"
] | null | null | null | 2020/day2/password.py | DanielKillenberger/AdventOfCode | e9b40c1ae09ee4bffbbf6acca1a2778aed5f1561 | [
"MIT"
] | null | null | null | with open("input.txt", "r") as input_file:
input = input_file.read().split("\n")
passwords = list(map(lambda line: [list(map(int, line.split(" ")[0].split("-"))), line.split(" ")[1][0], line.split(" ")[2]], input))
valid = 0
for password in passwords:
count_letter = password[2].count(password[1])
if password[0][0] <= count_letter <= password[0][1]:
valid += 1
print(valid)
| 28.5 | 133 | 0.611529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 30 | 0.075188 |
a611ec7c6de69b5cd4939c8bb49718b4e41ba387 | 4,361 | py | Python | scripts/gen_cert.py | SUNET/sunet-auth-server | a85256917c4159fd1c8eb1f6a6ed28777b64d403 | [
"BSD-2-Clause"
] | 1 | 2021-05-26T02:37:10.000Z | 2021-05-26T02:37:10.000Z | scripts/gen_cert.py | SUNET/sunet-auth-server | a85256917c4159fd1c8eb1f6a6ed28777b64d403 | [
"BSD-2-Clause"
] | 1 | 2021-05-17T08:32:15.000Z | 2021-05-17T08:32:16.000Z | scripts/gen_cert.py | SUNET/sunet-auth-server | a85256917c4159fd1c8eb1f6a6ed28777b64d403 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import argparse
import os
import sys
from base64 import b64encode
from datetime import datetime, timedelta
from cryptography import x509
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.hashes import SHA256
from cryptography.x509 import NameOID
__author__ = 'lundberg'
def main(args: argparse.Namespace):
    """Generate a 4096-bit RSA key and a self-signed certificate.

    When ``args.out`` is set, writes ``<out>/<common_name>.key`` and
    ``<out>/<common_name>.crt``, refusing to overwrite existing files;
    otherwise prints the PEM data to stdout. Always prints the base64
    SHA-256 certificate fingerprint (cert#S256) at the end.
    """
    # Generate key
    key = rsa.generate_private_key(public_exponent=65537, key_size=4096)
    passphrase = serialization.NoEncryption()
    if args.passphrase is not None:
        passphrase = serialization.BestAvailableEncryption(args.passphrase.encode())
    private_bytes = key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=passphrase,
    )
    # Write key
    if args.out is not None:
        key_path = f'{args.out}{os.sep}{args.common_name}.key'
        if os.path.exists(key_path):
            # never overwrite an existing key
            sys.stderr.write(f'{key_path} already exists\n')
            sys.exit(1)
        with open(key_path, 'wb') as f:
            f.write(private_bytes)
    else:
        sys.stdout.writelines(f'Private key for {args.common_name}:\n')
        sys.stdout.writelines(private_bytes.decode('utf-8'))
        sys.stdout.writelines('\n')
    # Various details about who we are. For a self-signed certificate the
    # subject and issuer are always the same.
    subject = issuer = x509.Name(
        [
            x509.NameAttribute(NameOID.COUNTRY_NAME, args.country),
            x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, args.province),
            x509.NameAttribute(NameOID.LOCALITY_NAME, args.locality),
            x509.NameAttribute(NameOID.ORGANIZATION_NAME, args.organization),
            x509.NameAttribute(NameOID.COMMON_NAME, args.common_name),
        ]
    )
    alt_names = [x509.DNSName(alt_name) for alt_name in args.alt_names]
    cert = (
        x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(key.public_key())
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.utcnow())
        .not_valid_after(datetime.utcnow() + timedelta(days=args.expires))
        .add_extension(
            x509.SubjectAlternativeName(alt_names),
            critical=False,
            # Sign our certificate with our private key
        )
        .sign(key, hashes.SHA256())
    )
    public_bytes = cert.public_bytes(serialization.Encoding.PEM)
    # Write certificate
    if args.out is not None:
        cert_path = f'{args.out}{os.sep}{args.common_name}.crt'
        if os.path.exists(cert_path):
            # never overwrite an existing certificate
            sys.stderr.write(f'{cert_path} already exists\n')
            sys.exit(1)
        with open(cert_path, 'wb') as f:
            f.write(public_bytes)
    else:
        sys.stdout.writelines(f'Certificate for {args.common_name}:\n')
        sys.stdout.writelines(public_bytes.decode('utf-8'))
        sys.stdout.writelines('\n')
    # Print additional info
    sys.stdout.writelines('cert#S256 fingerprint:\n')
    sys.stdout.writelines(b64encode(cert.fingerprint(algorithm=SHA256())).decode('utf-8'))
    sys.stdout.writelines('\n')
if __name__ == '__main__':
    # CLI wrapper around main(); only --common-name is mandatory.
    parser = argparse.ArgumentParser(description='Generate key and cert')
    parser.add_argument('--country', '-c', default='SE', help='country (default: SE)', type=str)
    parser.add_argument('--province', '-p', default='Stockholm', help='province (default: Stockholm)', type=str)
    parser.add_argument('--locality', '-l', default='Stockholm', help='locality (default: Stockholm)', type=str)
    parser.add_argument('--organization', '-o', default='Sunet', help='organization (default: Sunet)', type=str)
    parser.add_argument('--common-name', '-cn', help='common name', type=str, required=True)
    parser.add_argument('--expires', '-e', default=365, help='expires in X days (default: 365)', type=int)
    parser.add_argument('--alt-names', help='alternative names', nargs='*', default=[], type=str)
    parser.add_argument('--passphrase', help='passphrase for key', nargs='?', default=None, type=str)
    parser.add_argument('--out', help='output directory', nargs='?', default=None, type=str)
    main(args=parser.parse_args())
| 43.178218 | 112 | 0.677368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 978 | 0.22426 |
a611f0ca7daf79a9c49d9bcc01c2d037698b5b57 | 574 | py | Python | tests/apis/test_files.py | ninoseki/uzen | 93726f22f43902e17b22dd36142dac05171d0d84 | [
"MIT"
] | 76 | 2020-02-27T06:36:27.000Z | 2022-03-10T20:18:03.000Z | tests/apis/test_files.py | ninoseki/uzen | 93726f22f43902e17b22dd36142dac05171d0d84 | [
"MIT"
] | 33 | 2020-03-13T02:04:14.000Z | 2022-03-04T02:06:11.000Z | tests/apis/test_files.py | ninoseki/uzen | 93726f22f43902e17b22dd36142dac05171d0d84 | [
"MIT"
] | 6 | 2020-03-17T16:42:25.000Z | 2021-04-27T06:35:46.000Z | import asyncio
import pytest
from fastapi.testclient import TestClient
from app.models.script import Script
@pytest.mark.usefixtures("scripts_setup")
def test_files(client: TestClient, event_loop: asyncio.AbstractEventLoop):
    """Fetching an existing file by its id returns HTTP 200."""
    # take any script created by the scripts_setup fixture
    first = event_loop.run_until_complete(Script.all().first())
    sha256 = first.file_id
    response = client.get(f"/api/files/{sha256}")
    assert response.status_code == 200
def test_files_404(client: TestClient, event_loop: asyncio.AbstractEventLoop):
    """Requesting an unknown file id yields HTTP 404."""
    response = client.get("/api/files/404")
    assert response.status_code == 404
| 27.333333 | 78 | 0.763066 | 0 | 0 | 0 | 0 | 297 | 0.517422 | 0 | 0 | 53 | 0.092334 |
a612ca73184d6c4eb4704ff1a1c002a934e7280d | 1,384 | py | Python | src/compas_ui/rhino/forms/info.py | BlockResearchGroup/compas_ui | 8b5a6121eee837d306bf20c44c91f94a5c185f90 | [
"MIT"
] | null | null | null | src/compas_ui/rhino/forms/info.py | BlockResearchGroup/compas_ui | 8b5a6121eee837d306bf20c44c91f94a5c185f90 | [
"MIT"
] | 3 | 2022-02-24T17:56:30.000Z | 2022-03-31T09:48:40.000Z | src/compas_ui/rhino/forms/info.py | BlockResearchGroup/compas_ui | 8b5a6121eee837d306bf20c44c91f94a5c185f90 | [
"MIT"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import Eto.Drawing
import Eto.Forms
import Rhino.UI
import Rhino
class InfoForm(Eto.Forms.Dialog[bool]):
    """Modal Eto dialog showing read-only text with a single OK button."""
    def __init__(self, text, title="Info", width=800, height=500):
        self.Title = title
        self.Padding = Eto.Drawing.Padding(0)
        self.Resizable = True
        # allow the user to shrink the dialog down to half the requested size
        self.MinimumSize = Eto.Drawing.Size(0.5 * width, 0.5 * height)
        self.ClientSize = Eto.Drawing.Size(width, height)
        textarea = Eto.Forms.TextArea()
        textarea.Text = text
        textarea.ReadOnly = True
        # two vertical sections: the text area (stretching) and the button row
        layout = Eto.Forms.DynamicLayout()
        layout.BeginVertical(
            Eto.Drawing.Padding(12, 12, 12, 0), Eto.Drawing.Size(0, 0), True, True
        )
        layout.AddRow(textarea)
        layout.EndVertical()
        layout.BeginVertical(
            Eto.Drawing.Padding(12, 12, 12, 18), Eto.Drawing.Size(6, 0), False, False
        )
        layout.AddRow(None, self.ok)
        layout.EndVertical()
        self.Content = layout
    @property
    def ok(self):
        # NOTE(review): each access creates a fresh button and rebinds
        # DefaultButton — appears intentional for Eto dialogs, but confirm.
        self.DefaultButton = Eto.Forms.Button(Text="OK")
        self.DefaultButton.Click += self.on_ok
        return self.DefaultButton
    def on_ok(self, sender, event):
        """Close the dialog, returning True as the modal result."""
        self.Close(True)
    def show(self):
        """Show the dialog modally over the main Rhino window."""
        return self.ShowModal(Rhino.UI.RhinoEtoApp.MainWindow)
| 30.755556 | 85 | 0.642341 | 1,206 | 0.871387 | 0 | 0 | 165 | 0.11922 | 0 | 0 | 10 | 0.007225 |
a613386fbb04fd42a7d02cb198aa13ad34526d71 | 259 | py | Python | python-hello/src/main/python/FizzBuzz.py | demo-pool/languages-all | f9fac8f6d8dde92dea8c332777244a4f3af8bd3f | [
"Apache-2.0"
] | null | null | null | python-hello/src/main/python/FizzBuzz.py | demo-pool/languages-all | f9fac8f6d8dde92dea8c332777244a4f3af8bd3f | [
"Apache-2.0"
] | null | null | null | python-hello/src/main/python/FizzBuzz.py | demo-pool/languages-all | f9fac8f6d8dde92dea8c332777244a4f3af8bd3f | [
"Apache-2.0"
] | null | null | null | # -*- coding=utf-8 -*-
import sys
# Classic FizzBuzz over 1..n, where n is taken from the command line.
n = int(sys.argv[1])
print(n)
for i in range(1, n + 1):
    divisible_by_5 = i % 5 == 0
    divisible_by_3 = i % 3 == 0
    if divisible_by_3 and divisible_by_5:
        print('FizzBuzz')
    elif divisible_by_3:
        print("Fizz")
    elif divisible_by_5:
        print("Buzz")
    else:
        print(i)
| 14.388889 | 25 | 0.498069 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.155477 |
a613bf4c7cec25a8201b732ff819946f8ef2f964 | 8,776 | py | Python | MaaSSim/driver.py | Farnoud-G/MaaSSim | e9537b5d391da70b70f2b64c28f2e3378d1facd9 | [
"MIT"
] | null | null | null | MaaSSim/driver.py | Farnoud-G/MaaSSim | e9537b5d391da70b70f2b64c28f2e3378d1facd9 | [
"MIT"
] | null | null | null | MaaSSim/driver.py | Farnoud-G/MaaSSim | e9537b5d391da70b70f2b64c28f2e3378d1facd9 | [
"MIT"
] | null | null | null | ################################################################################
# Module: driver.py
# Description: Driver agent
# Rafal Kucharski @ TU Delft, The Netherlands
################################################################################
from enum import Enum
import time
class driverEvent(Enum):
    """
    Sequence of driver events logged while simulating; values follow the
    order of a ride, with negative values used for opt-out/shift-end states.
    """
    STARTS_DAY = 0
    OPENS_APP = 1
    RECEIVES_REQUEST = 2
    ACCEPTS_REQUEST = 3
    REJECTS_REQUEST = 4
    IS_ACCEPTED_BY_TRAVELLER = 5
    IS_REJECTED_BY_TRAVELLER = 6
    ARRIVES_AT_PICKUP = 7
    MEETS_TRAVELLER_AT_PICKUP = 8
    DEPARTS_FROM_PICKUP = 9
    ARRIVES_AT_DROPOFF = 10
    CONTINUES_SHIFT = 11
    STARTS_REPOSITIONING = 12
    REPOSITIONED = 13
    DECIDES_NOT_TO_DRIVE = -1
    ENDS_SHIFT = -2
class VehicleAgent(object):
"""
Driver Agent
operating in a loop between his shift_start and shift_end
serving cyclically enqueing to queue of his platform waiting for a match and serving the reuqest
"""
    def __init__(self, simData, veh_id):
        """Create the driver agent and register its main loop as a sim process.

        :param simData: reference to the Simulator object
        :param veh_id: unique vehicle id (row index into sim.vehicles)
        """
        # ids
        self.sim = simData  # reference to Simulator object
        self.id = veh_id  # unique vehicle id
        self.veh = self.sim.vehicles.loc[veh_id].copy()  # copy of inData vehicle data
        # if we want to restart vehicles everyday from fixed locations
        if self.sim.params.get('vehicle_fixed_positions', False): #f#
            self.veh.pos = self.sim.vehicle_fixed_positions.loc[self.id] #f#
        self.platform_id = self.veh.platform  # id of a platform
        self.platform = self.sim.plats[self.platform_id]  # reference to the platform
        # local variables
        self.paxes = list()  # travellers currently assigned to this vehicle
        self.schedule = None  # schedule served by vehicle (single request for case of non-shared rides)
        self.exit_flag = False  # raised at the end of the shift
        self.tveh_pickup = None  # travel time from .pos to request first node
        # output reports
        self.myrides = list()  # report of this vehicle process, populated while simulating
        # functions (behavioural hooks supplied by the simulator)
        self.f_driver_out = self.sim.functions.f_driver_out  # exit from the system due to prev exp
        self.f_driver_decline = self.sim.functions.f_driver_decline  # reject the incoming request
        self.f_driver_repos = self.sim.functions.f_driver_repos  # reposition after you are free again
        # events
        self.requested = self.sim.env.event()  # triggers when vehicle is requested
        self.arrives_at_pick_up = dict()  # per-traveller events: ready for pick-up
        self.arrives = dict()  # per-traveller events: arrival at traveller origin
        # main action
        self.action = self.sim.env.process(self.loop_day())  # main process in simu
    def update(self, event=None, pos=None, db_update=True):
        """Record a change of the vehicle's event and/or position.

        Called whenever pos or event of the vehicle changes, to keep the
        simulator's vehicle table consistent and to log the ride.

        :param event: new driverEvent (skipped when falsy)
        :param pos: new position (skipped when falsy — NOTE(review): a
            position value of 0 would be silently ignored; confirm node
            ids can never be 0)
        :param db_update: when True, write the updated row back to sim.vehicles
        """
        if event:
            self.veh.event = event
        if pos:
            self.veh.pos = pos  # update position
        if db_update:
            self.sim.vehicles.loc[self.id] = self.veh
        self.append_ride()  # log the new state
def append_ride(self):
""" appends current event in time and space to the log of vehicle rides """
ride = dict()
ride['veh'] = self.id
ride['pos'] = self.veh.pos
ride['t'] = self.sim.env.now
ride['event'] = self.veh.event.name
ride['paxes'] = list(self.paxes) # None if self.request is None else self.request.name
self.myrides.append(ride)
self.disp()
def disp(self):
"""degugger"""
if self.sim.params.sleep > 0:
self.sim.logger.info(self.myrides[-1])
time.sleep(self.sim.params.sleep)
def till_end(self):
    """Return the time remaining until the shift ends or the simulation ends."""
    now = self.sim.env.now
    return min(self.veh.shift_end - now, self.sim.t1 - now - 1)
def clear_me(self):
    """Reset schedule and per-request events so the vehicle can take a new ride.

    Called after an arrival, a rejection, or any other reason the current
    requests will not be completed.
    """
    self.arrives_at_pick_up = {}
    self.arrives = {}
    self.requested = self.sim.env.event()  # fresh 'requested' trigger
    self.schedule = None
def loop_day(self):
    """Main routine of the vehicle process (one simulated day).

    Generator driven by the simulation environment: decides whether the
    driver works today, waits for the shift start, then repeatedly
    repositions, queues on the platform, serves assigned schedules and
    finally quits at the end of the shift or of the simulation.

    Fix: 'no_shows.apppend' typo (would have raised AttributeError) is now
    'no_shows.append'.
    """
    self.update(event=driverEvent.STARTS_DAY)
    if self.f_driver_out(veh=self):  # first see if driver wants to work this day (by default he wants)
        self.update(event=driverEvent.DECIDES_NOT_TO_DRIVE)
        msg = "veh {:>4} {:40} {}".format(self.id, 'opted-out from the system', self.sim.print_now())
        self.sim.logger.info(msg)
        return
    yield self.sim.timeout(self.veh.shift_start, variability=self.sim.vars.shift)  # wait until shift start
    self.update(event=driverEvent.OPENS_APP)  # in the system
    while True:
        # try:  # deprecated since now traveller rejects instantly for simplicity
        repos = self.f_driver_repos(veh=self)  # reposition yourself
        if repos.flag:  # if reposition
            self.update(event=driverEvent.STARTS_REPOSITIONING)
            yield self.sim.timeout(repos.time, variability=self.sim.vars.ride)
            self.update(event=driverEvent.REPOSITIONED, pos=repos.pos)
        self.platform.appendVeh(self.id)  # appended for the queue
        yield self.requested | self.sim.timeout(self.till_end())  # wait until requested or shift end
        if self.schedule is None:
            if self.id in self.sim.vehQ:  # early exit if I quit shift, or sim ends
                # NOTE(review): membership is tested on sim.vehQ but the pop uses
                # platform.vehQ with an index taken from sim.vehQ -- confirm the
                # two queues are guaranteed to stay in sync.
                self.platform.vehQ.pop(self.sim.vehQ.index(self.id))
                self.platform.updateQs()
            self.exit_flag = True
        else:
            # create events for each traveller
            for req in self.schedule.req_id.dropna().unique():  # two events per traveller
                self.arrives_at_pick_up[req] = self.sim.env.event()  # when vehicle is ready to pick him up
                self.arrives[req] = self.sim.env.event()  # when vehicle arrives at dropoff
            no_shows = list()
            for i in range(1, self.schedule.shape[0]):  # loop over the schedule
                stage = self.schedule.loc[i]
                if stage.req_id in no_shows:
                    break  # we do not serve this gentleman
                yield self.sim.timeout(self.sim.skims.ride[self.veh.pos][stage.node],
                                       variability=self.sim.vars.ride)  # travel time
                if stage.od == 'o':  # pickup
                    self.arrives_at_pick_up[stage.req_id].succeed()  # raise event
                    self.update(event=driverEvent.ARRIVES_AT_PICKUP, pos=stage.node)  # vehicle arrived
                    # driver waits until traveller arrives (or patience)
                    yield self.sim.pax[stage.req_id].arrived_at_pick_up | \
                        self.sim.timeout(self.sim.params.times.pickup_patience,
                                         variability=self.sim.vars.ride)
                    # NOTE(review): this tests the truthiness of the event object,
                    # not whether it fired -- if events are always truthy this
                    # branch never triggers; confirm '.triggered' is not intended.
                    if not self.sim.pax[stage.req_id].arrived_at_pick_up:  # if traveller did not arrive
                        no_shows.append(stage.req_id)  # fixed typo: was 'apppend'
                        break  # we do not serve this gentleman
                    self.update(event=driverEvent.MEETS_TRAVELLER_AT_PICKUP)
                    yield self.sim.pax[stage.req_id].pickuped  # wait until passenger has boarded
                    self.paxes.append(int(stage.req_id))
                    self.update(event=driverEvent.DEPARTS_FROM_PICKUP)
                elif stage.od == 'd':
                    self.arrives[stage.req_id].succeed()  # arrived
                    self.update(event=driverEvent.ARRIVES_AT_DROPOFF, pos=stage.node)
                    yield self.sim.pax[stage.req_id].dropoffed  # wait until passenger has left
                    self.paxes.remove(stage.req_id)
        self.clear_me()  # initialize events, clear request
        if self.till_end() <= 1:  # quit shift
            self.exit_flag = True
        if self.exit_flag:
            # handles end of the sim
            self.update(event=driverEvent.ENDS_SHIFT)
            msg = "veh {:>4} {:40} {}".format(self.id, 'quitted shift', self.sim.print_now())
            self.sim.logger.info(msg)
            break
| 48.21978 | 111 | 0.595602 | 8,477 | 0.96593 | 4,314 | 0.491568 | 0 | 0 | 0 | 0 | 2,760 | 0.314494 |
a614e63989f4c0650fe16bea6a88db1937719591 | 7,918 | py | Python | nomogram.py | maxipi/head-loss-nomogram | c355e34aedb67818f311425188720e91449a1e90 | [
"MIT"
] | 1 | 2021-02-19T05:21:26.000Z | 2021-02-19T05:21:26.000Z | nomogram.py | maxipi/head-loss-nomogram | c355e34aedb67818f311425188720e91449a1e90 | [
"MIT"
] | null | null | null | nomogram.py | maxipi/head-loss-nomogram | c355e34aedb67818f311425188720e91449a1e90 | [
"MIT"
] | null | null | null | from math import pi
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.optimize import minimize_scalar
__author__ = "Markus Pichler"
__credits__ = ["Markus Pichler"]
__maintainer__ = "Markus Pichler"
__email__ = "markus.pichler@tugraz.at"
__version__ = "0.1"
__license__ = "MIT"
# gravitational acceleration
g = 9.81 # m/s²
# kinematic viscosity
ny = 1.3e-6 # m^2/s (10°C water)
# _________________________________________________________________________________________________________________
def log_scale(start, end, minor=False, lower=None, upper=None):
"""
get the log scale ticks for the diagram
Args:
start (int):
end (int):
minor (bool):
lower (int | float):
upper (int | float):
Returns:
numpy.array: ticks of the scale
"""
if minor:
std = np.array([1., 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.,
2.2, 2.4, 2.6, 2.8, 3., 3.2, 3.4, 3.6, 3.8, 4., 4.2,
4.4, 4.6, 4.8, 5., 5.5, 6., 6.5, 7., 7.5, 8., 8.5,
9., 9.5, 10.])
else:
std = np.array([1., 1.5, 2., 3., 4., 5., 6., 8., 10.])
res = np.array([])
for x in range(start, end):
res = np.append(res, std * 10. ** x)
res = np.unique(res.round(3))
if lower is not None:
res = res[res >= lower]
if upper is not None:
res = res[res <= upper]
return res
def nomogram(k=0.1):
    """
    make the nomogram

    Draws a double-logarithmic head-loss nomogram: head-loss gradient J on
    the x axis, flow Q on the y axis, with labelled iso-lines for pipe
    diameter d and flow velocity v.

    Args:
        k (float): roughness in (mm)

    Returns:
        matplotlib.pyplot.Figure: of the plot
    """
    # diameter
    d = np.array(
        [0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.1, 0.125, 0.15, 0.2, 0.25, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0,
         1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0])  # m

    # velocity
    v = np.array(
        [0.1, 0.15, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.2, 1.4, 1.6, 1.8, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0,
         6.0, 7.0, 8.0, 9.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0])  # m/s

    # head loss
    J = log_scale(-1, 3, minor=True)  # mm/m
    J_labels = log_scale(-1, 3, minor=False)

    # flow
    Q = log_scale(-1, 5, minor=True, upper=20000)  # L/s
    Q_labels = log_scale(-1, 5, minor=False, upper=20000)

    # _________________________________________________________________________________________________________________
    def area(d):
        # circular cross-section area (m^2) for diameter d (m)
        return d ** 2 * pi / 4

    # _________________________________________________________________________________________________________________
    def velocity(J, d):
        # velocity (m/s) for head-loss gradient J (mm/m), diameter d (m) and
        # the roughness k (mm) captured from the enclosing scope
        return -2 * np.log10(2.51 * ny / (d * np.sqrt(2 * g * (J / 1000) * d)) +
                             (k / 1000) / (3.71 * d)) * \
            np.sqrt(2 * g * d * (J / 1000))

    # _________________________________________________________________________________________________________________
    def get_diameter(v, J):
        # numerically invert velocity(J, d) for d; NaN when the optimum hits
        # a search bound, i.e. no diameter inside the plotted range
        res = minimize_scalar(lambda x: abs(velocity(J, x) - v), bounds=(min(d), max(d)), method='bounded').x
        if (round(res, 5) >= max(d)) or (round(res, 5) <= min(d)):
            return np.NaN
        return res

    # _________________________________________________________________________________________________________________
    fig, ax = plt.subplots()

    def bbox(pad):
        # white, slightly transparent background box for text labels
        return {'facecolor': 'white', 'alpha': 0.8, 'pad': pad, 'linewidth': 0}

    # _________________________________________________________________________________________________________________
    # diameter lines
    df_d = pd.DataFrame(index=J, columns=d)
    first = True
    for d_ in df_d:
        vi = velocity(df_d.index.values, d_)
        df_d[d_] = area(d_) * vi * 1000  # flow Q in L/s along this diameter line
        # change_d = 0.6
        # low, up = [0.34, 5.4]
        change_d = np.NaN
        low, up = [2.2, 2.2]
        # NOTE(review): with change_d = NaN both comparisons below are always
        # False, so every label is placed at velocity 'up'; presumably the
        # two-position variant kept in the comments above is disabled on purpose.
        if d_ == change_d:
            tvs = [low, up]
        elif d_ < change_d:
            tvs = [low]
        else:
            tvs = [up]
        for tv in tvs:
            tx = np.interp(tv, vi, J)  # J where this d-line reaches velocity tv
            ty = area(d_) * tv * 1000  # corresponding Q
            if first or d_ in (change_d, max(d)):
                txt = 'd={}m'.format(d_)
                if first:
                    first = False
            else:
                txt = d_
            ax.text(tx, ty, txt, fontsize=5, rotation=30, horizontalalignment='center', verticalalignment='bottom',
                    bbox=bbox(1))

    ax = df_d.plot(c='black', legend=False, logy=True, logx=True, ax=ax, lw=0.5)

    # _________________________________________________________________________________________________________________
    # velocity lines
    print('0')  # progress marker
    df_v = pd.DataFrame(index=np.logspace(-1, 3, num=500), columns=v)
    # df_v = pd.DataFrame(index=J, columns=v)
    first = True
    for v_ in df_v:
        d_ = df_v.index.to_series().apply(lambda Ji: get_diameter(v_, Ji)).values
        # d_ = np.array([get_d(v_, Ji) for Ji in df_v.index.values])
        Ai = area(d_)
        df_v[v_] = Ai * v_ * 1000  # flow Q in L/s along this velocity line
        # change_v = 5.
        # low, up = [0.043, 0.43]
        change_v = 9.
        low, up = [0.11, 0.43]
        if v_ == change_v:
            tds = [low, up]
        elif v_ < change_v:
            tds = [low]
        else:
            tds = [up]
        for td in tds:
            # build a (d, J) table, sorted by d, to interpolate the label position
            data = pd.DataFrame()
            data['d'] = d_
            data['J'] = df_v.index.values
            data.dropna(inplace=True)
            data.sort_values('d', inplace=True)
            tx = np.interp(td, data['d'].values, data['J'].values)
            ty = area(td) * v_ * 1000
            if first or (v_ in (change_v, max(v))):
                txt = 'v={}m/s'.format(v_).replace('.0', '')
                if first:
                    first = False
            else:
                txt = v_
            if pd.notna(tx) and pd.notna(ty):
                ax.text(tx, ty, txt, fontsize=5, rotation=-60, horizontalalignment='center', verticalalignment='bottom',
                        bbox=bbox(1))

    print('1')  # progress marker
    ax = df_v.plot(c='black', legend=False, logy=True, logx=True, ax=ax, lw=0.5)

    # _________________________________________________________________________________________________________________
    # axis ticks: dense minor grid, labelled major grid
    ax.set_xticks(J, minor=True)
    ax.set_yticks(Q, minor=True)
    ax.set_xticks(J_labels, minor=False)
    ax.set_yticks(Q_labels, minor=False)
    ax.set_xticklabels([])
    ax.set_yticklabels([])
    ax.set_xticklabels([], minor=True)
    ax.set_yticklabels([], minor=True)
    ax.set_xticklabels([str(x).replace('.00', '').replace('.0', '') for x in J_labels], fontsize=6,
                       fontstretch='ultra-condensed')
    ax.set_yticklabels([str(x).replace('.00', '').replace('.0', '') for x in Q_labels], fontsize=6)

    ax.grid(linestyle=':', lw=0.2, c='grey', which='minor')
    ax.grid(linestyle='-', lw=0.4, c='darkgrey')

    ax.set_xlabel('Druckhöhengefälle J (mm/m)')
    ax.set_ylabel('Durchfluss Q (l/s)')

    ax.set_ylim([min(Q), max(Q)])
    ax.set_xlim([min(J), max(J)])

    ax.tick_params(direction='out', bottom=True, top=True, left=True, right=True, labelbottom=True, labeltop=True,
                   labelleft=True, labelright=True, which='both')

    # annotate the roughness and the meaning of the two iso-line families
    ax.text(0.15, 11000, 'k = {:0.01f} mm'.format(k), fontsize=22, fontstretch='ultra-condensed', bbox=bbox(5))
    ax.text(340, 1.7, 'v (m/s)', fontsize=12, rotation=-60, bbox=bbox(2))
    ax.text(300, 0.6, 'd (m)', fontsize=12, rotation=30, bbox=bbox(2))

    # _________________________________________________________________________________________________________________
    # figure post processing
    fig.set_size_inches(h=29.7 / 2.54, w=21 / 2.54)
    fig.tight_layout()
    return fig
if __name__ == '__main__':
    # The roughness is defined once and used for both the computation and the
    # output file name; previously the figure was built with the default k
    # while the file name used a separately defined k, which could silently
    # diverge when one of the two was changed.
    k = 0.1  # mm
    fig = nomogram(k=k)
    fig.savefig('Nomogramm k_{:0.1f}mm'.format(k).replace('.', '') + '.pdf')
    plt.close(fig)
| 32.584362 | 120 | 0.570599 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,239 | 0.282631 |
a616479ff75877116910e729d7edffd6c5271108 | 278 | py | Python | 21. generadores.py | JSNavas/CursoPython2.7 | d1f9170dbf897b6eb729f9696a208880e33c550b | [
"MIT"
] | null | null | null | 21. generadores.py | JSNavas/CursoPython2.7 | d1f9170dbf897b6eb729f9696a208880e33c550b | [
"MIT"
] | null | null | null | 21. generadores.py | JSNavas/CursoPython2.7 | d1f9170dbf897b6eb729f9696a208880e33c550b | [
"MIT"
] | null | null | null | lista = ["bienvenido "]
ciclo = (c * 4 for c in lista)
print ciclo
print ciclo.next()
for cadena in ciclo:
print cadena
print
n = input("Factorial de: ")
def factorial(n):
    """Yield the running products n, n*(n-1), ...; the last value equals n!.

    Yields nothing for n <= 1.
    """
    product = 1
    while n > 1:
        product *= n
        yield product
        n -= 1
# Print every running product; the last line printed is n factorial.
for fact in factorial(n):
    print fact
| 8.6875 | 30 | 0.600719 | 0 | 0 | 69 | 0.248201 | 0 | 0 | 0 | 0 | 29 | 0.104317 |
a618f828871a4f9f42a3e61da6a4defd594afbdb | 176 | py | Python | d2/detr/__init__.py | reubenwenisch/detr_custom | ae03ed599336f184e471eaf0048614dd788ffaf9 | [
"Apache-2.0"
] | 8,849 | 2020-05-27T00:52:55.000Z | 2022-03-31T14:21:30.000Z | d2/detr/__init__.py | reubenwenisch/detr_custom | ae03ed599336f184e471eaf0048614dd788ffaf9 | [
"Apache-2.0"
] | 453 | 2020-05-27T04:01:32.000Z | 2022-03-30T03:48:26.000Z | d2/detr/__init__.py | reubenwenisch/detr_custom | ae03ed599336f184e471eaf0048614dd788ffaf9 | [
"Apache-2.0"
] | 1,691 | 2020-05-27T02:16:40.000Z | 2022-03-31T05:44:39.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .config import add_detr_config
from .detr import Detr
from .dataset_mapper import DetrDatasetMapper
| 35.2 | 70 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 70 | 0.397727 |
a6192df53d0f19acab254e85fa3cfaa6abba3386 | 28 | py | Python | models/__init__.py | Rozi1/MobileNetV2_CIFAR10 | 5e3a7cac963c9b84d8efc60984bda6956ba8ec26 | [
"MIT"
] | null | null | null | models/__init__.py | Rozi1/MobileNetV2_CIFAR10 | 5e3a7cac963c9b84d8efc60984bda6956ba8ec26 | [
"MIT"
] | null | null | null | models/__init__.py | Rozi1/MobileNetV2_CIFAR10 | 5e3a7cac963c9b84d8efc60984bda6956ba8ec26 | [
"MIT"
] | null | null | null | from .mobilenetv2 import *
| 9.333333 | 26 | 0.75 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
a61a5253b5a95c6aebea946cdcae6bc0f71fcf56 | 3,086 | py | Python | clients/KelsonD/validator.py | KelsonDenton/battleships | a2b22560af07b050427aa9120328a41d34cdaafb | [
"MIT"
] | null | null | null | clients/KelsonD/validator.py | KelsonDenton/battleships | a2b22560af07b050427aa9120328a41d34cdaafb | [
"MIT"
] | null | null | null | clients/KelsonD/validator.py | KelsonDenton/battleships | a2b22560af07b050427aa9120328a41d34cdaafb | [
"MIT"
] | null | null | null | import math
class MoveValidator:
# class takes coordinates, orient, spaces, and board as constructors
def __init__(self, coord, orient, spaces, board):
self.coordinate = coord
self.orientation = orient
self.space = spaces
self.playerBoard = board
# method that checks the user enters a valid input for orientation
def check_coord(self):
while self.coordinate < 0 or self.coordinate > 99 or self.playerBoard[self.coordinate] == "■":
self.coordinate = int(input("Please enter a coordinate that is in range and free: "))
self.coordinate = self.check_coord()
return self.coordinate
else:
return self.coordinate
# method that checks the user entered a valid character for orientation
def check_orient(self):
while self.orientation not in {'H', 'V'}:
self.orientation = str(input("Please enter an orientation that is H)orizontal or V)ertical: "))
self.orientation = self.check_orient()
return self.orientation
else:
return self.orientation
# method that checks ensuing places ship is placed are all free
def check_spaces(self):
try:
# checks if any following spots fall on existing piece in vertical
if self.orientation == 'V':
for i in range(1, self.space):
if self.playerBoard[self.coordinate + 10 * i] != "0":
self.coordinate = int(input("Piece falls on another. Please pick another coordinate: "))
self.check_spaces()
# checks if any following spots fall on existing piece in horizontal
elif self.orientation == 'H':
for i in range(1, self.space):
if self.playerBoard[self.coordinate + i] != "0":
self.coordinate = int(input("Piece falls on another. Please pick another coordinate: "))
self.check_spaces()
# catch when user enters coordinate out of range
except:
self.coordinate = int(input('The piece you entered fell off the edge. Pick another coordinate: '))
self.check_spaces()
return
# method to check if piece attempts to wrap around board
def wrap_around(self):
if self.orientation == 'H':
start_coord = int(self.coordinate) + 1
end_coord = int(self.coordinate) + int(self.space)
for i in range(start_coord, end_coord):
# if first digit of coord does not match first digit of next placement makes user re enter
if math.floor(i/10) != math.floor((i - 1) / 10):
self.coordinate = int(input("Piece falls off the edge. Pick another coordinate: "))
self.wrap_around()
# method that puts together all other methods
def check(self):
self.coordinate = self.check_coord()
self.orientation = self.check_orient()
self.check_spaces()
self.wrap_around()
| 44.724638 | 112 | 0.604342 | 3,073 | 0.995142 | 0 | 0 | 0 | 0 | 0 | 0 | 1,023 | 0.331282 |
a61c4e95d34c5941c55c963f8a3737186e992605 | 777 | py | Python | tests/test_lights.py | kevinlondon/python-room-indicator | 5c92d7482a8e8539863189c8dc0ee4b78fe5d424 | [
"MIT"
] | 1 | 2015-04-30T19:40:04.000Z | 2015-04-30T19:40:04.000Z | tests/test_lights.py | kevinlondon/python-room-indicator | 5c92d7482a8e8539863189c8dc0ee4b78fe5d424 | [
"MIT"
] | 16 | 2015-04-08T16:40:48.000Z | 2015-04-30T19:39:56.000Z | tests/test_lights.py | kevinlondon/meetingbot | 5c92d7482a8e8539863189c8dc0ee4b78fe5d424 | [
"MIT"
] | null | null | null | import pytest
from mock import patch
from pubsub import pub
from meetingbot import lights
class TestLittleBits:
    """Tests for the pubsub-driven LittleBits meeting-light control."""

    @patch.object(lights, "change_littlebits_power")
    def test_subscription_calls_change_lights(self, littlebits_mock):
        # Subscribing change_light and publishing a colour on the channel
        # should forward the mapped colour value to the LittleBits power call.
        pub.subscribe(lights.change_light, "meeting_light")
        pub.sendMessage("meeting_light", color="red")
        littlebits_mock.assert_called_with(lights.COLORS['red'])

    def test_invalid_color_raises_value_error(self):
        # A colour name not present in lights.COLORS is rejected.
        with pytest.raises(ValueError):
            lights.change_light(color="black")

    @patch.object(lights.pub, "subscribe")
    def test_configure_calls_subscribe_with_light_channel(self, sub_mock):
        # configure() wires change_light to the module's pubsub channel.
        lights.configure()
        sub_mock.assert_called_with(lights.change_light, lights.CHANNEL)
| 33.782609 | 74 | 0.750322 | 684 | 0.880309 | 0 | 0 | 510 | 0.656371 | 0 | 0 | 83 | 0.106821 |
a61c51cede02299e8b9027760641e7c08fd3ef9b | 3,113 | py | Python | task_2/csv_process.py | meklon/python_geekbrains | 535a54afabcff9d7ee9643c592ed2a1d0dd48b32 | [
"MIT"
] | null | null | null | task_2/csv_process.py | meklon/python_geekbrains | 535a54afabcff9d7ee9643c592ed2a1d0dd48b32 | [
"MIT"
] | null | null | null | task_2/csv_process.py | meklon/python_geekbrains | 535a54afabcff9d7ee9643c592ed2a1d0dd48b32 | [
"MIT"
] | null | null | null | import os
from os import listdir
from os.path import isfile, join
from pathlib import Path
from typing import Dict
from typing import List
from chardet.universaldetector import UniversalDetector
from pandas import DataFrame
def get_file_list(files_path: str) -> List[str]:
    """Return the paths (directory prefix included) of all plain files in *files_path*."""
    return [join(files_path, name)
            for name in listdir(files_path)
            if isfile(join(files_path, name))]
def detect_file_encoding(path_file: Path) -> str:
    """Detect the character encoding of *path_file* with chardet.

    Feeds the file line by line to a UniversalDetector and stops as soon as
    the detector is confident about the result.
    """
    detector = UniversalDetector()
    with path_file.open(mode='rb') as f:
        for line in f:
            detector.feed(line)
            if detector.done:
                break
        # the 'with' block closes the file; the explicit close() was redundant
    detector.close()
    encoding = detector.result["encoding"]
    return encoding
def list_from_file(path_file: Path, encoding: str) -> List[str]:
    """Read *path_file* with *encoding* and return its cleaned, non-empty lines."""
    with path_file.open(mode='r', encoding=encoding) as f:
        # the 'with' block closes the file; the explicit close() was redundant
        data = list(f.read().splitlines())
    filtered_data = filter_data(data)
    return filtered_data
def filter_data(data: List[str]) -> List[str]:
    """Drop empty lines and collapse all runs of whitespace to single spaces."""
    non_empty = [line for line in data if line]
    return [' '.join(line.split()) for line in non_empty]
def parse_data(file_data: List[str]) -> Dict[str, str]:
    """Extract the known system-info fields from the report lines.

    Each line containing one of the known field names contributes its value
    (the text after ': '); later occurrences overwrite earlier ones.
    """
    known_fields = ("Изготовитель системы", "Название ОС", "Код продукта", "Тип системы")
    data_dict = {}
    for entry in file_data:
        for field in known_fields:
            if field in entry:
                data_dict[field] = pick_value_from_string(entry)
                break  # at most one field per line, like the original if/elif chain
    return data_dict
def pick_value_from_string(text: str) -> str:
    """Return the part of *text* after the first ': ' separator ('' if absent)."""
    return text.partition(': ')[2]
def get_df_total(files_paths: List[str]) -> DataFrame:
    """Build one DataFrame with a row of parsed system info per input file.

    Fixes two defects of the previous version: the last column name contained
    a stray '»' ("Тип системы»"), so the values parsed under "Тип системы"
    landed in an extra, unintended column; and rows were accumulated with the
    deprecated DataFrame.append (removed in pandas 2.0) via a dummy first row
    that then had to be dropped again.
    """
    columns = ["Изготовитель системы", "Название ОС", "Код продукта", "Тип системы"]
    records = []
    for file_path in files_paths:
        file_path = Path(file_path)
        encoding = detect_file_encoding(file_path)
        file_data = list_from_file(file_path, encoding)
        records.append(parse_data(file_data))
    df = DataFrame(records, columns=columns)
    # Do some other data processing with pandas DataFrame...
    return df
def save_to_csv(df: DataFrame, path: str) -> None:
    """Write *df* to *path* as a semicolon-separated CSV without the index."""
    df.to_csv(Path(path), sep=';', index=False)
def check_mkdir_output_path(path_output: str) -> None:
    """Create the output directory (including missing parents) if needed.

    Uses makedirs(..., exist_ok=True), which avoids the check-then-create
    race of the previous exists()/mkdir() pair and also creates missing
    parent directories.
    """
    os.makedirs(path_output, exist_ok=True)
files_paths = get_file_list('data/csv/')
df = get_df_total(files_paths)
print(df)
check_mkdir_output_path('result/')
save_to_csv(df, 'result/data.csv')
if __name__ == "__main__":
main()
| 29.367925 | 87 | 0.674912 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 631 | 0.193321 |
a61cd68d579ebad4e09d900ef2fe940ab06920ee | 8,852 | py | Python | bbbs/afisha/tests/test_urls.py | AnnaKPolyakova/bbbs | e12b172e601d854291f4877a41ac20798388af4c | [
"MIT"
] | 2 | 2021-07-13T10:31:10.000Z | 2021-07-22T21:32:01.000Z | bbbs/afisha/tests/test_urls.py | AnnaKPolyakova/bbbs | e12b172e601d854291f4877a41ac20798388af4c | [
"MIT"
] | 28 | 2021-06-12T23:31:04.000Z | 2021-07-19T05:51:42.000Z | bbbs/afisha/tests/test_urls.py | AnnaKPolyakova/bbbs | e12b172e601d854291f4877a41ac20798388af4c | [
"MIT"
] | 5 | 2021-06-17T20:24:13.000Z | 2021-10-02T02:36:32.000Z | from django.urls import reverse
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from bbbs.afisha.factories import EventFactory
from bbbs.afisha.models import EventParticipant
from bbbs.common.factories import CityFactory
from bbbs.users.factories import UserFactory
from bbbs.users.models import Profile
class AfishaURLTests(APITestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.city = CityFactory(name="Воркута")
cls.mentor = UserFactory(
profile__role=Profile.Role.MENTOR,
profile__city=cls.city,
)
cls.moderator_reg = UserFactory(
profile__role=Profile.Role.MODERATOR_REG,
profile__city=cls.city,
)
cls.moderator_gen = UserFactory(
profile__role=Profile.Role.MODERATOR_GEN,
profile__city=cls.city,
)
cls.admin = UserFactory(
profile__role=Profile.Role.ADMIN,
profile__city=cls.city,
)
cls.users = [
cls.mentor,
cls.moderator_reg,
cls.moderator_gen,
cls.admin,
]
cls.event = EventFactory(
city=cls.mentor.profile.city,
)
cls.booking = EventParticipant.objects.create(
user=cls.mentor,
event=cls.event,
)
cls.unauthorized_client = APIClient()
cls.path_events_participants = reverse("event-participants-list")
cls.path_individual_booking = reverse(
"event-participants-detail",
args=[cls.mentor.profile.id],
)
cls.path_events = reverse("events")
def return_authorized_user_client(self, user):
authorized_client = APIClient()
authorized_client.force_authenticate(user=user)
return authorized_client
def url_returns_405_not_allowed_test_utility(
self, client, url, method_names
):
"""Helper. Tests "url" for not allowed methods.
It translates "methods_names" to correspond methods on "client" and
asserts when error different from 405 (not allowed) returns.
"""
for method_name in method_names:
with self.subTest(method_name):
method = getattr(client, method_name)
response = method(url)
self.assertEqual(
response.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED,
msg=(
f"Убедитесь, что для '{url}' "
f"метод '{method_name}' запрещен и возвращает "
f"правильный номер ошибки."
),
)
def url_returns_404_not_found_test_utility(
self, client, url, method_names
):
"""Helper. Tests "url" for 404 with provided methods.
It translates "methods_names" to correspond methods on "client" and
asserts when error different from 404 (not found) returns.
"""
for method_name in method_names:
with self.subTest(method_name):
method = getattr(client, method_name)
response = method(url)
self.assertEqual(
response.status_code,
status.HTTP_404_NOT_FOUND,
msg=(
f"Убедитесь, для индивидуальных URL, таких как"
f"'{url}' при запросе методом '{method_name}'"
f"возвращается ошибка 404"
),
)
def test_events_unauthorized_client(self):
"""Unauthorized client gets 401 error on 'events' url."""
client = AfishaURLTests.unauthorized_client
response = client.get(AfishaURLTests.path_events)
self.assertEqual(
response.status_code,
status.HTTP_401_UNAUTHORIZED,
msg=(
f"Проверьте что неавторизованный пользователь не имеет "
f"доступ к '{AfishaURLTests.path_events}'."
),
)
def test_events_participants_unauthorized_client(self):
"""Unauthorized client gets 401 error on 'event-participants' url."""
client = AfishaURLTests.unauthorized_client
response = client.get(AfishaURLTests.path_events_participants)
self.assertEqual(
response.status_code,
status.HTTP_401_UNAUTHORIZED,
msg=(
f"Проверьте что неавторизованный пользователь не имеет доступ "
f"к '{AfishaURLTests.path_events_participants}'."
),
)
def test_events_mentor_has_access(self):
"""Mentor gets response with 200 code on 'events'."""
user = AfishaURLTests.mentor
client = self.return_authorized_user_client(user=user)
response = client.get(AfishaURLTests.path_events)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
msg=(
f"Проверьте что пользователь с ролью "
f"'{user.profile.role}' "
f"имеет доступ к "
f"'{AfishaURLTests.path_events_participants}'."
),
)
def test_event_participants_mentor_has_access(self):
"""Mentor gets response with 200 code on 'events_participants'."""
user = AfishaURLTests.mentor
client = self.return_authorized_user_client(user=user)
response = client.get(AfishaURLTests.path_events_participants)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
msg=(
f"Проверьте что пользователь с ролью "
f"'{user.profile.role}' "
f"имеет доступ к "
f"'{AfishaURLTests.path_events_participants}'."
),
)
def test_events_individual_urls_return_404(self):
"""URLs like '/events/{id}' should return 404 for tested methods."""
methods_to_test = [
"get",
"patch",
"post",
"put",
"delete",
]
event_id = AfishaURLTests.event.id
individual_event_url = AfishaURLTests.path_events + str(event_id)
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_404_not_found_test_utility(
client=client,
url=individual_event_url,
method_names=methods_to_test,
)
def test_events_participants_individual_urls_return_405(self):
"""URLs like '/events-participants/{id}' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"get",
"patch",
"post",
"put",
]
individual_booking_url = AfishaURLTests.path_individual_booking
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=individual_booking_url,
method_names=not_allowed_method_names,
)
def test_events_list_url_returns_405(self):
"""URL '/events/' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"patch",
"post",
"put",
"delete",
]
events_url = AfishaURLTests.path_events
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=events_url,
method_names=not_allowed_method_names,
)
def test_events_participants_list_returns_405(self):
"""URL '/events_participants/' should return 405.
HTTP_405_METHOD_NOT_ALLOWED should be returned only for methods in
'not_allowed_method_names' list.
"""
not_allowed_method_names = [
"patch",
"put",
"delete",
]
events_participants_url = AfishaURLTests.path_events_participants
client = self.return_authorized_user_client(AfishaURLTests.mentor)
self.url_returns_405_not_allowed_test_utility(
client=client,
url=events_participants_url,
method_names=not_allowed_method_names,
)
| 35.266932 | 80 | 0.583258 | 8,819 | 0.960675 | 0 | 0 | 1,349 | 0.14695 | 0 | 0 | 2,465 | 0.268519 |
a61d15ac9b3f74c6613b91bc2dfb0f3289f8d263 | 1,654 | py | Python | scripts/pdb_tofasta.py | XiyuChenFAU/kgs_vibration_entropy | 117c4a3d39ec6285eccc1d3b8e5de9a21db21ec9 | [
"MIT"
] | 1 | 2020-05-23T18:26:14.000Z | 2020-05-23T18:26:14.000Z | scripts/pdb_tofasta.py | XiyuChenFAU/kgs_vibration_entropy | 117c4a3d39ec6285eccc1d3b8e5de9a21db21ec9 | [
"MIT"
] | 8 | 2017-01-26T19:54:38.000Z | 2021-02-06T16:06:30.000Z | scripts/pdb_tofasta.py | XiyuChenFAU/kgs_vibration_entropy | 117c4a3d39ec6285eccc1d3b8e5de9a21db21ec9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2.7
import pdb_structure
import sys
import os.path
if __name__ == "__main__":
    # Convert a PDB structure file to FASTA: one record per chain, with '-'
    # marking residue numbers that are missing from the structure.
    if len(sys.argv) != 2:
        print("Usage: "+sys.argv[0]+" <pdb-file>")
        sys.exit(1)

    pdbFile = sys.argv[1]
    struc = pdb_structure.PDBFile(pdbFile)
    name = os.path.basename(pdbFile).replace(".pdb", "")  # record name = file stem

    from collections import defaultdict
    # group the atoms of the first model by chain identifier
    chain_map = defaultdict(list)
    for a in struc.models[0]:
        # if not a.hetatm:
        chain_map[a.chain].append(a)

    # residue-name -> one-letter code: 3-letter amino acids (incl. the
    # ambiguity codes ASX/GLX) plus single-letter nucleotide names
    protresnmap = {'ALA': 'A',
                   'ARG': 'R',
                   'ASN': 'N',
                   'ASP': 'D',
                   'ASX': 'B',
                   'CYS': 'C',
                   'GLU': 'E',
                   'GLN': 'Q',
                   'GLX': 'Z',
                   'GLY': 'G',
                   'HIS': 'H',
                   'ILE': 'I',
                   'LEU': 'L',
                   'LYS': 'K',
                   'MET': 'M',
                   'PHE': 'F',
                   'PRO': 'P',
                   'SER': 'S',
                   'THR': 'T',
                   'TRP': 'W',
                   'TYR': 'Y',
                   'VAL': 'V',
                   'A': 'A',
                   'C': 'C',
                   'G': 'G',
                   'U': 'U',
                   'T': 'T'
                   }
    for c in chain_map:
        print(">"+name+"_"+c)  # FASTA header: >NAME_CHAIN
        # residue number -> one-letter code (presumably all atoms of one
        # residue share resn, so repeated assignments agree -- TODO confirm)
        resi_map = {a.resi: protresnmap[a.resn] for a in chain_map[c]}
        seq = []
        # walk the full residue-number range; gaps in numbering become '-'
        for r in range(min(resi_map.keys()), max(resi_map.keys())+1):
            seq.append("-" if r not in resi_map else resi_map[r])
        print("".join(seq))
| 29.017544 | 70 | 0.354293 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 299 | 0.180774 |
a61da7302100b24b0ecf19c7fc7419c1d5f84365 | 4,122 | py | Python | normalizing_flows/flows/flow.py | TanguyUrvoy/normalizing-flows | e485fe0875c117517353a9ab40e19ff951561cfc | [
"MIT"
] | 15 | 2019-10-26T11:00:02.000Z | 2021-05-24T08:39:31.000Z | normalizing_flows/flows/flow.py | TanguyUrvoy/normalizing-flows | e485fe0875c117517353a9ab40e19ff951561cfc | [
"MIT"
] | 2 | 2021-03-26T16:01:13.000Z | 2021-06-09T11:49:48.000Z | normalizing_flows/flows/flow.py | TanguyUrvoy/normalizing-flows | e485fe0875c117517353a9ab40e19ff951561cfc | [
"MIT"
] | 8 | 2020-01-10T21:50:39.000Z | 2022-03-16T21:01:25.000Z | import tensorflow as tf
import tensorflow_probability as tfp
from typing import List
from .transform import Transform, AmortizedTransform
class Flow(AmortizedTransform):
def __init__(self, steps: List[Transform], input_shape=None, name='flow', *args, **kwargs):
    """
    Construct a flow as a flat sequence of transforms.

    Nested Flow instances in *steps* are unrolled so that self.steps holds
    only elementary transforms.
    """
    # unroll nested flows into one flat list
    flat_steps = []
    for step in steps:
        if isinstance(step, Flow):
            flat_steps.extend(step.steps)
        else:
            flat_steps.append(step)
    self.steps = flat_steps
    self.num_steps = len(flat_steps)
    # add num_flows alias for legacy code
    self.num_flows = self.num_steps
    super().__init__(*args, input_shape=input_shape, name=name, **kwargs)
@staticmethod
def uniform(num_flows, transform_init):
    """
    Create a simple, uniform flow with *num_flows* steps.

    transform_init follows the signature f: i -> Transform, where i is the
    index of the step in the flow sequence; all returned transforms must
    share one concrete type.
    """
    assert num_flows > 0, "num_flows must be > 0"
    transforms = [transform_init(i) for i in range(num_flows)]
    expected_type = type(transforms[0])
    assert all(expected_type == type(t) for t in transforms), "All transforms should have the same type for uniform flow"
    return Flow(transforms)
def _forward_shape(self, input_shape):
for step in self.steps:
input_shape = step._forward_shape(input_shape)
return input_shape
def _inverse_shape(self, input_shape):
for step in reversed(self.steps):
input_shape = step._inverse_shape(input_shape)
return input_shape
def _initialize(self, input_shape):
for step in self.steps:
step.initialize(input_shape)
input_shape = step._forward_shape(input_shape)
def _forward(self, z_0, *params: tf.Tensor, return_sequence=False, **kwargs):
"""
Computes the forward pass of the flow: z_k = f_k . f_k-1 ... f_1(z)
Tensor shapes:
z_0 : (batch_size, d)
params : optional sequence of tensors (batch_size, m_i) where m_i is the number of parameters for flow step i
"""
zs = [z_0]
ldj = 0.0
for i in range(self.num_steps):
step = self.steps[i]
params_i = [params[i]] if len(params) > i else []
z_i, ldj_i = step.forward(zs[-1], *params_i, **kwargs)
zs.append(z_i)
ldj += ldj_i
return (zs, ldj) if return_sequence else (zs[-1], ldj)
def _inverse(self, z, *params: tf.Tensor, return_sequence=False, **kwargs):
    """
    Inverse pass of the flow: z_0 = f^-1_1 . f^-1_2 ... f^-1_k(z).

    Tensor shapes:
        z      : (batch_size, d)
        params : optional per-step tensors (batch_size, m_i); params[i]
                 pairs with the i-th step of the *reversed* sequence.
    Returns (z_0, log_det_jacobian), or (all intermediate z's, ldj) when
    return_sequence is True.
    """
    outputs = [z]
    total_ldj = 0.0
    # walk the steps back-to-front; enumerate index also selects params
    for i, step in enumerate(reversed(self.steps)):
        step_params = [params[i]] if i < len(params) else []
        z_prev, step_ldj = step.inverse(outputs[-1], *step_params, **kwargs)
        # fail fast if a step produced NaN/Inf
        tf.debugging.assert_all_finite(z_prev, f'{step.name} output nan/inf values for input {outputs[-1]}')
        outputs.append(z_prev)
        total_ldj += step_ldj
    if return_sequence:
        return (outputs, total_ldj)
    return (outputs[-1], total_ldj)
def _regularization_loss(self):
    """Sum of every step's regularization loss."""
    step_losses = [step.regularization_loss() for step in self.steps]
    return tf.math.add_n(step_losses)
def _param_count(self, shape):
    """Total parameter count across amortized steps only."""
    counts = [step.param_count(shape)
              for step in self.steps if isinstance(step, AmortizedTransform)]
    return tf.math.reduce_sum(counts)
def _create_variables(self, shape, initializer=None, **kwargs):
    """Concatenate the variables created by each amortized step, in order."""
    variables = []
    for step in self.steps:
        if isinstance(step, AmortizedTransform):
            # plain list concatenation, same semantics as sum(..., [])
            variables = variables + step.create_variables(shape, initializer, **kwargs)
    return variables
| 41.636364 | 128 | 0.619602 | 3,982 | 0.966036 | 0 | 0 | 704 | 0.170791 | 0 | 0 | 1,138 | 0.27608 |
a61fb4cae066d888d2f215862b59d8af6c32f27d | 537 | py | Python | src/mainmodulename/plugins/type_one_plugin/__init__.py | portikCoder/basic_python_plugin_project | 3176f6d5683ec03ec670ebda7ca6513289c72699 | [
"MIT"
] | 1 | 2021-04-09T20:10:04.000Z | 2021-04-09T20:10:04.000Z | src/mainmodulename/plugins/type_one_plugin/__init__.py | portikCoder/basic_python_plugin_project | 3176f6d5683ec03ec670ebda7ca6513289c72699 | [
"MIT"
] | 1 | 2021-02-06T21:48:45.000Z | 2021-02-06T21:48:45.000Z | src/mainmodulename/plugins/type_one_plugin/__init__.py | portikCoder/basic_python_plugin_project | 3176f6d5683ec03ec670ebda7ca6513289c72699 | [
"MIT"
] | null | null | null | # Copyright (c) 2021 portikCoder. All rights reserved.
# See the license text under the root package.
from typing import Type
from mainmodulename.common.plugin_template import Plugin
from mainmodulename.plugins.type_one_plugin.type_one_plugin import TypeOnePlugin
# The plugin implementation this package exposes to the plugin loader.
PLUGIN_CLASS: Type[Plugin] = TypeOnePlugin
# Alternative names under which this plugin can be looked up.
ALIASES = ['ctr', 'ctr_plugin', 'CtrFileNamePlugin']
def get_plugin_class() -> Type[Plugin]:
    """
    Return the plugin class exported by this package (``PLUGIN_CLASS``).

    Hook point: if any logic is ever needed before handing out the class
    (registration, lazy setup, ...), put it here instead of at import time.
    :return: the concrete :class:`Plugin` subclass, currently ``TypeOnePlugin``.
    """
    return PLUGIN_CLASS
| 29.833333 | 80 | 0.757914 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 240 | 0.446927 |
a6210dfe559293cf9f1af6eef76873ac94d75c61 | 468 | py | Python | submissions/Flanagin/myLogic.py | dysomni/aima-python | c67104e50007ec5ac2a9aa37f0cb972cb6315528 | [
"MIT"
] | null | null | null | submissions/Flanagin/myLogic.py | dysomni/aima-python | c67104e50007ec5ac2a9aa37f0cb972cb6315528 | [
"MIT"
] | null | null | null | submissions/Flanagin/myLogic.py | dysomni/aima-python | c67104e50007ec5ac2a9aa37f0cb972cb6315528 | [
"MIT"
] | null | null | null |
music = {
    # First-order-logic knowledge base: flute pieces, their composers, and
    # the composers' eras; the rule derives which works can be programmed.
    'kb': '''
Instrument(Flute)
Piece(Undine, Reinecke)
Piece(Carmen, Bourne)
(Instrument(x) & Piece(w, c) & Era(c, r)) ==> Program(w)
Era(Reinecke, Romantic)
Era(Bourne, Romantic)
''',
    # Query: which works x satisfy Program(x)?
    'queries': '''
Program(x)
''',
}
life = {
    # KB: all musicians are stressed; students with texts are stressed.
    'kb': '''
Musician(x) ==> Stressed(x)
(Student(x) & Text(y)) ==> Stressed(x)
Musician(Heather)
''',
    # Query: who is stressed?
    'queries': '''
Stressed(x)
'''
}
# Registry mapping example names to their {kb, queries} definitions
# (presumably consumed by the course's FOL inference driver - confirm).
Examples = {
    'music': music,
    'life': life
} | 11.414634 | 56 | 0.519231 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 367 | 0.784188 |
a622911cc0068f50bdd2fd71f6dba3dc3e035b9f | 1,861 | py | Python | python/stack_of_plates.py | Zetinator/cracking_the_coding_interview | e0ee160cbd44c24994cbe903bffc7b0f1c0f069d | [
"MIT"
] | null | null | null | python/stack_of_plates.py | Zetinator/cracking_the_coding_interview | e0ee160cbd44c24994cbe903bffc7b0f1c0f069d | [
"MIT"
] | null | null | null | python/stack_of_plates.py | Zetinator/cracking_the_coding_interview | e0ee160cbd44c24994cbe903bffc7b0f1c0f069d | [
"MIT"
] | null | null | null | """3.3 Stack of Plates: Imagine a (literal) stack of plates. If the stack gets too high, it might topple.
Therefore, in real life, we would likely start a new stack when the previous stack exceeds some
threshold. Implement a data structure SetOfStacks that mimics this. SetOfStacks should be
composed of several stacks and should create a new stack once the previous one exceeds capacity.
SetOfStacks. push () and SetOfStacks. pop () should behave identically to a single stack
(that is, pop ( ) should return the same values as it would if there were just a single stack).
FOLLOW UP
Implement a function popAt (int index) which performs a pop operation on a specific sub-stack.
"""
class SetOfStacks():
    """A stack that transparently spills into multiple sub-stacks (CtCI 3.3).

    Each sub-stack holds at most ``max_stack_size`` plates; ``push``/``pop``
    behave like a single stack, and ``pop_at`` pops from one specific
    sub-stack (the follow-up question).
    """

    def __init__(self, max_stack_size: int, x=()):
        """
        :param max_stack_size: capacity of each sub-stack
        :param x: optional iterable of initial values, pushed in order
        """
        # Fix: the original default was the mutable ``x=[]``; an empty
        # tuple is an equivalent but safe immutable default.
        self.core = []  # list of sub-stacks, each itself a list
        self.max = max_stack_size
        for e in x:
            self.push(e)

    def __repr__(self):
        return repr(self.core)

    def pop_at(self, index: int):
        """Pop from sub-stack *index*; None when there is nothing to pop."""
        if not self.core: return
        if not index < len(self.core): return
        if not self.core[index]: return
        return self.core[index].pop()

    def pop(self):
        """Pop the overall top plate; None when the structure is empty."""
        if not self.core: return
        # discard sub-stacks emptied by earlier pop/pop_at calls
        while self.core and not self.core[-1]: self.core.pop()
        if self.core: return self.core[-1].pop()

    def push(self, value):
        """Push *value*, opening a new sub-stack when the last one is full."""
        if not self.core: self.core = [[value]]; return
        if len(self.core[-1]) == self.max: self.core.append([value])
        else: self.core[-1].append(value)

    def peek(self, n_stack: int = -1):
        """Return (without removing) the top of sub-stack *n_stack*.

        Fix: the original body referenced an undefined name ``n_stack`` and
        always raised NameError; it is now a parameter defaulting to the
        top-most sub-stack. Returns None when the structure is empty or the
        selected sub-stack is empty.
        """
        if not self.core: return
        if self.core[n_stack]: return self.core[n_stack][-1]
# Smoke test (runs on import): exercise push, pop_at and pop.
multi_stack = SetOfStacks(5, range(24))
# test push: 24 values, capacity 5 -> four full sub-stacks plus one holding 4
print(f'after push: stacks: {multi_stack}')
# test pop_at: pop sub-stack 2 six times (once more than it holds)
for _ in range(6):
    print(f'after pop_at(2): {multi_stack.pop_at(2)}, stack: {multi_stack}')
# test pop: drain most of the remaining plates
for i in range(20):
    multi_stack.pop()
print(f'after pop: stacks: {multi_stack}')
| 37.22 | 105 | 0.668458 | 863 | 0.463729 | 0 | 0 | 0 | 0 | 0 | 0 | 858 | 0.461042 |
a622a26eeee249b906e3a3a0624316864f9d54ab | 930 | py | Python | theory/3rd_sprint/django_orm.py | abi83/YaPractice | 1c3a5670ee2f872d4f872623a392755318b893b5 | [
"MIT"
] | 3 | 2020-11-18T05:16:30.000Z | 2021-03-08T06:36:01.000Z | theory/3rd_sprint/django_orm.py | abi83/YaPractice | 1c3a5670ee2f872d4f872623a392755318b893b5 | [
"MIT"
] | null | null | null | theory/3rd_sprint/django_orm.py | abi83/YaPractice | 1c3a5670ee2f872d4f872623a392755318b893b5 | [
"MIT"
] | 1 | 2021-01-20T12:41:48.000Z | 2021-01-20T12:41:48.000Z | # Создайте модель мероприятия для сайта-афиши.
# У модели должны быть такие поля:
# Название мероприятия (name), не больше 200 символов
# Дата и время проведения мероприятия (start_at)
# Описание мероприятия (description)
# Адрес электронной почты организатора мероприятия (contact)
# Пользователь, который создал мероприятие (author,
# related_name этого поля должно быть events)
# Название места проведения мероприятия (location), не более 400 символов
from django.db import models
from django.contrib.auth import get_user_model
class Event(models.Model):
    """An event listing for the playbill site (per the exercise spec above)."""
    # Event title, at most 200 characters.
    name = models.CharField(max_length=200)
    # Date and time the event takes place.
    # Fix: the original used auto_now_add=True, which records the row
    # *creation* timestamp and makes the field non-editable - wrong for the
    # event's scheduled start, which the organizer must supply.
    start_at = models.DateTimeField('event start')
    # Free-form event description.
    # Fix: the original wrote ``models.TextField`` without parentheses,
    # assigning the field *class* instead of a field instance, so no
    # database column was created.
    description = models.TextField()
    # Organizer's contact e-mail address.
    contact = models.EmailField()
    # User who created the event.
    # Fix: the spec requires related_name "events" (reverse accessor
    # ``user.events``); the original used "event_author".
    author = models.ForeignKey(get_user_model(), on_delete=models.CASCADE,
                               related_name="events")
    # Venue name, at most 400 characters.
    location = models.CharField(max_length=400)
| 42.272727 | 74 | 0.760215 | 396 | 0.322738 | 0 | 0 | 0 | 0 | 0 | 0 | 774 | 0.630807 |
a624a95d76b077a78a35eb3699f9ea5308ea13a4 | 4,518 | py | Python | ovf2numpy.py | mikrl/mumate | 4962b1f2cf40f292ac32f3742067764d4e178f32 | [
"MIT"
] | 1 | 2020-01-16T13:50:26.000Z | 2020-01-16T13:50:26.000Z | ovf2numpy.py | mikrl/Helicoid-Simulations | 4962b1f2cf40f292ac32f3742067764d4e178f32 | [
"MIT"
] | 3 | 2020-07-08T19:32:28.000Z | 2020-07-08T19:33:21.000Z | ovf2numpy.py | mikrl/Helicoid-Simulations | 4962b1f2cf40f292ac32f3742067764d4e178f32 | [
"MIT"
] | 2 | 2020-05-07T15:25:21.000Z | 2020-07-04T02:45:48.000Z | from __future__ import division, print_function
import os
import os.path as fs
import numpy as np
import pandas as pd
import re
### PURPOSE: Takes a directory containing N files of the form mXXXXXX.ovf ###
### and imports them to an N x X x Y x Z x 3 numpy array ###
### where X,Y,Z are the number of cells in x,y,z ###
### Files will have the naming convention m*.ovf where * is 6 digit decimal number ###
### eg. 000000, 000001, 000123, etc ###
### So use regex to find something of the form m/<number>*/.ovf ###
def import_dir(path='.', which_files='all', skyrmion=False, core_slice='h', average=True):
    """Import every mXXXXXX.ovf magnetisation file in *path* into one array.

    :param path: directory to scan (default: current directory)
    :param which_files: 'all', or a (start, stop) pair selecting a sub-range
        of the sorted file list (stop is exclusive)
    :param skyrmion: forwarded to importOvfFilePandas (take a core slice)
    :param core_slice: 'h' or 'v', forwarded to importOvfFilePandas
    :param average: average each file down to a 1-D profile along z
    :return: numpy array of shape (num_files, z_nodes) with dtype=(float, 3)
        NOTE(review): this preallocation assumes the per-file result is a
        (z_nodes, 3) array, i.e. average=True - confirm for average=False.
    """
    ls = sorted(os.listdir(path))  # sorted so files import in m000000.. order
    magnetisation_files = []
    for el in ls:  # keep only files matching the magnetisation naming scheme
        if re.match('m\d*\.ovf', el) is not None:
            magnetisation_files.append(el)
    # read the grid dimensions (x, y, z nodes) from the first file's header
    file_name = fs.join(path, str(magnetisation_files[0]))
    data_dimensions = getOvfAttributes(file_name)
    num_files_to_import = len(magnetisation_files)
    if which_files != 'all':
        print("not importing all files, importing files ", which_files[0], " to ", which_files[1])
        num_files_to_import = which_files[1] - which_files[0]
    all_mag_data = np.empty((num_files_to_import, data_dimensions[2]), dtype=(float, 3))
    i = 0  # write index into all_mag_data (independent of n when a range is used)
    first_time = True
    percentages = []  # percentage milestones already printed
    for n, fname in enumerate(magnetisation_files):
        if which_files != 'all':
            if n < which_files[0]:
                continue
            if n >= which_files[1]:
                break
        if first_time:
            print("starting to read ", num_files_to_import, " files")
            first_time = False
        this_filename = fs.join(path, fname)
        all_mag_data[i] = importOvfFilePandas(this_filename, data_dimensions, core_slice=core_slice, skyrmion=skyrmion, average_to_1D=average)
        # crude progress reporting: print roughly every 10% once
        if i / num_files_to_import * 100 % 10 < 0.2:
            if np.floor(i * 100 / num_files_to_import) not in percentages:
                print(np.floor(i * 100.0 / num_files_to_import), "% done")
                percentages.append(np.floor(i * 100 / num_files_to_import))
        i += 1
    print("100% done!")
    print("read ", i, " files")
    return all_mag_data
def getOvfAttributes(filename):
    """Parse the node counts out of an OVF header.

    The header is assumed to carry the x/y/z node counts on lines 20-22
    (0-indexed), with the numeric value starting at column 10
    (e.g. ``# xnodes: 2``).

    :param filename: path to a text-mode .ovf file
    :return: tuple (x_nodes, y_nodes, z_nodes) on success;
        -1 when the filename is not a .ovf file;
        -2 when the file declares Binary data (unsupported).
    """
    if filename[-4:] != '.ovf':  # reject non-ovf files up front
        print("FATAL ERROR, NOT AN OVF FILE")
        return -1
    # Fix: the original leaked the file handle on the Binary early-return
    # path; 'with' guarantees the file is closed on every exit.
    with open(filename, 'r') as f:
        for j, line in enumerate(f):
            if re.match('.*Binary.*', line) is not None:
                print("FATAL ERROR: BINARY NOT SUPPORTED")
                return -2
            if j == 20:
                x_nodes = int(line[10:])
            elif j == 21:
                y_nodes = int(line[10:])
            elif j == 22:
                z_nodes = int(line[10:])
                break  # header fully parsed; don't read the data section
    return (x_nodes, y_nodes, z_nodes)
# takes filename, imports ovf as pandas dataframe, takes data dimensions in (x,y,z) nodes format
def importOvfFilePandas(this_filename, data_dims, average_to_1D=False, skyrmion=False, core_slice='h'):
    """Load one OVF file into a numpy array via pandas.

    :param this_filename: path to a text-mode OVF file (28 header rows and
        2 footer rows are skipped)
    :param data_dims: (x_nodes, y_nodes, z_nodes) grid dimensions
    :param average_to_1D: average the x/y axes away, leaving a (z, 3) array
    :param skyrmion: slice out a 2-cell-wide core strip before averaging
    :param core_slice: 'h' or 'v' - which axis the core strip is cut along
    :return: magnetisation array of shape (z, y, x, 3), reduced as requested
    """
    ave_axis = None
    # Fixes: raw string for the regex delimiter, and engine="python" because
    # the default C engine does not support skipfooter.
    raw_data = pd.read_csv(this_filename, header=None, skiprows=28, skipfooter=2,
                           delimiter=r"\s+", engine="python")
    # Fix: DataFrame.as_matrix() was removed in pandas 1.0; to_numpy() is
    # the supported replacement. OVF data is z-major, hence dims reversed.
    magnetisation_array = np.reshape(raw_data.to_numpy(), np.append(data_dims[::-1], 3))
    if skyrmion:
        # two-cell strip around the middle of the in-plane axis
        m1 = int(data_dims[1] / 2 - 1)
        m2 = int(data_dims[1] / 2 + 1)
        if core_slice == 'h':
            magnetisation_array = magnetisation_array[:, m1:m2, :]
            ave_axis = 1
        elif core_slice == 'v':
            magnetisation_array = magnetisation_array[:, :, m1:m2]
            ave_axis = 2
        if average_to_1D:
            magnetisation_array = np.mean(magnetisation_array, axis=ave_axis)
            magnetisation_array = np.mean(magnetisation_array, axis=0)
    elif average_to_1D:
        # collapse y then x, keeping the z axis and the 3-vector components
        for i in [1, 2]:
            magnetisation_array = np.mean(magnetisation_array, axis=1)
    return magnetisation_array
if __name__=="__main__":
    # Ad-hoc manual test entry point; the absolute path below is
    # machine-specific. NOTE(review): the commented examples pass
    # h_core_slice/v_core_slice kwargs that do not exist in the current
    # importOvfFilePandas signature (it takes core_slice='h'/'v').
    #test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/SkyrmionData/ovfimporttest/m000035.ovf', (128,128,1), skyrmion=True, h_core_slice=True, average_to_1D=True)
    test=import_dir('/home/michael/Desktop/Honours/MuMax3/DataProcessing/HelicoidData/helicoidv8_mid.out/')
    #test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/SkyrmionData/ovfimporttest/m000035.ovf', (128,128,1), skyrmion=True, v_core_slice=True, average_to_1D=True)
    #test=importOvfFilePandas('/home/michael/Desktop/Honours/MuMax3/DataProcessing/HelicoidData/helicoidv6.out/m000035.ovf', (2,2,128), average_to_1D=True)
| 36.435484 | 187 | 0.733289 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,880 | 0.416113 |
a6259b4f9145ab8259b28b51f8956f20553010e8 | 572 | py | Python | scrap_jobs/__main__.py | andytan0727/scrap-jobs | b03ca1b4b21e4e10e653d20e8a35d08cf1b70ab7 | [
"MIT"
] | null | null | null | scrap_jobs/__main__.py | andytan0727/scrap-jobs | b03ca1b4b21e4e10e653d20e8a35d08cf1b70ab7 | [
"MIT"
] | 2 | 2021-03-31T19:14:07.000Z | 2021-12-13T20:06:00.000Z | scrap_jobs/__main__.py | andytan0727/scrap-jobs | b03ca1b4b21e4e10e653d20e8a35d08cf1b70ab7 | [
"MIT"
] | null | null | null | """
Main entry for scrap_jobs module. To be run from console
"""
from scrap_jobs.app import App
from scrap_jobs.scraper.jobstreet import JobStreetScraper
if __name__ == '__main__':
key = input('Please input your search key: ')
location = input(
'Please enter your preferred location. Default to all if leave empty: '
)
scraper = JobStreetScraper(key, location)
scrapped_job_info = App.run_jobstreet_scraper(key, location)
App.convert_and_save_to_csv(scrapped_job_info)
App.convert_and_save_to_excel(scrapped_job_info)
| 33.647059 | 80 | 0.729021 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 179 | 0.312937 |
a62cb445a80bb771a1042ab9d3a511765794249f | 13,982 | py | Python | fetcher/proxyFetcher.py | PaleNeutron/proxy_pool | f43a3b62d4930877a6c26246985cce3628ef36d5 | [
"MIT"
] | null | null | null | fetcher/proxyFetcher.py | PaleNeutron/proxy_pool | f43a3b62d4930877a6c26246985cce3628ef36d5 | [
"MIT"
] | null | null | null | fetcher/proxyFetcher.py | PaleNeutron/proxy_pool | f43a3b62d4930877a6c26246985cce3628ef36d5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: proxyFetcher
Description :
Author : JHao
date: 2016/11/25
-------------------------------------------------
Change Activity:
2016/11/25: proxyFetcher
-------------------------------------------------
"""
__author__ = 'JHao'
import re
import requests
from time import sleep
from datetime import date, timedelta
import pandas as pd
import os
from util.webRequest import WebRequest
class ProxyFetcher(object):
    """
    Collection of free-proxy fetchers.

    Each ``freeProxyNN`` static method is a generator that scrapes one
    public proxy source and yields proxies as ``"host:port"`` strings.
    Fix applied: the original file defined ``freeProxy14`` twice with
    identical bodies; the dead first copy has been removed.
    """

    @staticmethod
    def freeProxy01():
        """Mimvp (米扑代理): https://proxy.mimvp.com/"""
        url_list = [
            'https://proxy.mimvp.com/freeopen',
            'https://proxy.mimvp.com/freeopen?proxy=in_tp'
        ]
        # Ports are rendered as images; map the known image tokens to ports.
        port_img_map = {'DMxMjg': '3128', 'Dgw': '80', 'DgwODA': '8080',
                        'DgwOA': '808', 'DgwMDA': '8000', 'Dg4ODg': '8888',
                        'DgwODE': '8081', 'Dk5OTk': '9999'}
        for url in url_list:
            html_tree = WebRequest().get(url).tree
            for tr in html_tree.xpath(".//table[@class='mimvp-tbl free-proxylist-tbl']/tbody/tr"):
                try:
                    ip = ''.join(tr.xpath('./td[2]/text()'))
                    port_img = ''.join(tr.xpath('./td[3]/img/@src')).split("port=")[-1]
                    port = port_img_map.get(port_img[14:].replace('O0O', ''))
                    if port:
                        yield '%s:%s' % (ip, port)
                except Exception as e:
                    print(e)

    @staticmethod
    def freeProxy02():
        """66ip (代理66): http://www.66ip.cn/"""
        url = "http://www.66ip.cn/mo.php"
        resp = WebRequest().get(url, timeout=10)
        proxies = re.findall(r'(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5})', resp.text)
        for proxy in proxies:
            yield proxy

    @staticmethod
    def freeProxy03():
        """pzzqz: https://pzzqz.com/"""
        from requests import Session
        from lxml import etree
        session = Session()
        try:
            # NOTE: verify=False deliberately disables TLS verification here.
            index_resp = session.get("https://pzzqz.com/", timeout=20, verify=False).text
            x_csrf_token = re.findall('X-CSRFToken": "(.*?)"', index_resp)
            if x_csrf_token:
                data = {"http": "on", "ping": "3000", "country": "cn", "ports": ""}
                proxy_resp = session.post("https://pzzqz.com/", verify=False,
                                          headers={"X-CSRFToken": x_csrf_token[0]}, json=data).json()
                tree = etree.HTML(proxy_resp["proxy_html"])
                for tr in tree.xpath("//tr"):
                    ip = "".join(tr.xpath("./td[1]/text()"))
                    port = "".join(tr.xpath("./td[2]/text()"))
                    yield "%s:%s" % (ip, port)
        except Exception as e:
            print(e)

    @staticmethod
    def freeProxy04():
        """shenjidaili (神鸡代理): http://www.shenjidaili.com/"""
        url = "http://www.shenjidaili.com/product/open/"
        tree = WebRequest().get(url).tree
        for table in tree.xpath("//table[@class='table table-hover text-white text-center table-borderless']"):
            for tr in table.xpath("./tr")[1:]:  # [1:] skips the header row
                proxy = ''.join(tr.xpath("./td[1]/text()"))
                yield proxy.strip()

    @staticmethod
    def freeProxy05(page_count=1):
        """kuaidaili (快代理): https://www.kuaidaili.com"""
        url_pattern = [
            'https://www.kuaidaili.com/free/inha/{}/',
            'https://www.kuaidaili.com/free/intr/{}/'
        ]
        url_list = []
        for page_index in range(1, page_count + 1):
            for pattern in url_pattern:
                url_list.append(pattern.format(page_index))
        for url in url_list:
            tree = WebRequest().get(url).tree
            proxy_list = tree.xpath('.//table//tr')
            sleep(1)  # must sleep, otherwise the second request returns no data
            for tr in proxy_list[1:]:
                yield ':'.join(tr.xpath('./td/text()')[0:2])

    @staticmethod
    def freeProxy06(page=2):
        """superfastip (极速代理): https://www.superfastip.com/"""
        url = "https://api.superfastip.com/ip/freeip?page={page}"
        for i in range(page):
            page_url = url.format(page=i + 1)
            try:
                resp_json = WebRequest().get(page_url).json
                for each in resp_json.get("freeips", []):
                    yield "%s:%s" % (each.get("ip", ""), each.get("port", ""))
            except Exception as e:
                print(e)

    @staticmethod
    def freeProxy07():
        """ip3366 (云代理): http://www.ip3366.net/free/"""
        urls = ['http://www.ip3366.net/free/?stype=1',
                "http://www.ip3366.net/free/?stype=2"]
        for url in urls:
            r = WebRequest().get(url, timeout=10)
            proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r.text)
            for proxy in proxies:
                yield ":".join(proxy)

    @staticmethod
    def freeProxy08():
        """ihuan (小幻代理): https://ip.ihuan.me/"""
        urls = [
            'https://ip.ihuan.me/address/5Lit5Zu9.html',
        ]
        for url in urls:
            r = WebRequest().get(url, timeout=10)
            proxies = re.findall(r'>\s*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\s*?</a></td><td>(\d+)</td>',
                                 r.text)
            for proxy in proxies:
                yield ":".join(proxy)

    @staticmethod
    def freeProxy09(page_count=1):
        """jiangxianli free proxy library: http://ip.jiangxianli.com/"""
        for i in range(1, page_count + 1):
            url = 'http://ip.jiangxianli.com/?country=中国&page={}'.format(i)
            html_tree = WebRequest().get(url).tree
            for index, tr in enumerate(html_tree.xpath("//table//tr")):
                if index == 0:  # header row
                    continue
                yield ":".join(tr.xpath("./td/text()")[0:2]).strip()

    @staticmethod
    def freeProxy10():
        """cn-proxy (hosted outside the GFW): http://cn-proxy.com/"""
        urls = ['http://cn-proxy.com/', 'http://cn-proxy.com/archives/218']
        request = WebRequest()
        for url in urls:
            r = request.get(url, timeout=10)
            proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\w\W]<td>(\d+)</td>', r.text)
            for proxy in proxies:
                yield ':'.join(proxy)

    @staticmethod
    def freeProxy11():
        """proxy-list.org: https://proxy-list.org/english/index.php"""
        urls = ['https://proxy-list.org/english/index.php?p=%s' % n for n in range(1, 10)]
        request = WebRequest()
        import base64
        for url in urls:
            r = request.get(url, timeout=10)
            # entries are base64-encoded inside a Proxy('...') JS call
            proxies = re.findall(r"Proxy\('(.*?)'\)", r.text)
            for proxy in proxies:
                yield base64.b64decode(proxy).decode()

    @staticmethod
    def freeProxy12():
        """proxylistplus: https://list.proxylistplus.com/"""
        urls = ['https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-1']
        request = WebRequest()
        for url in urls:
            r = request.get(url, timeout=10)
            proxies = re.findall(r'<td>(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})</td>[\s\S]*?<td>(\d+)</td>', r.text)
            for proxy in proxies:
                yield ':'.join(proxy)

    @staticmethod
    def freeProxy13(max_page=2):
        """89ip free proxies: http://www.89ip.cn/index.html"""
        base_url = 'http://www.89ip.cn/index_{}.html'
        for page in range(1, max_page + 1):
            url = base_url.format(page)
            r = WebRequest().get(url, timeout=10)
            proxies = re.findall(
                r'<td.*?>[\s\S]*?(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})[\s\S]*?</td>[\s\S]*?<td.*?>[\s\S]*?(\d+)[\s\S]*?</td>',
                r.text)
            for proxy in proxies:
                yield ':'.join(proxy)

    @staticmethod
    def freeProxy14():
        """xiladaili (西拉代理): http://www.xiladaili.com/

        Fix: the original defined this method twice with identical bodies;
        the redundant copy was deleted (it silently shadowed this one).
        """
        urls = ['http://www.xiladaili.com/']
        for url in urls:
            r = WebRequest().get(url, timeout=10)
            ips = re.findall(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}", r.text)
            for ip in ips:
                yield ip.strip()

    @staticmethod
    def freeProxy15():  # name must not clash with an existing fetcher
        """clarketm/proxy-list raw list on GitHub."""
        proxies = requests.get('https://raw.githubusercontent.com/clarketm/proxy-list/master/proxy-list-raw.txt').text.split()
        for proxy in proxies:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy16():  # name must not clash with an existing fetcher
        """webanetlabs daily proxy lists for the last 30 days."""
        ip_list = []
        for day in range(0, 30, 1):
            dd = date.today() - timedelta(days=day)
            ip_url = 'https://webanetlabs.net/proxylist2021/spisok_proksi_na_' + dd.strftime("%d.%m.%Y") + '.html'
            source = requests.get(ip_url)
            if source.status_code == 200:
                ip_list += re.findall(r'[0-9]+(?:\.[0-9]+){3}:[0-9]+', source.text)
        for proxy in ip_list:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy17():  # name must not clash with an existing fetcher
        """proxy-list.download API (http + https lists)."""
        http_ips = requests.get('https://www.proxy-list.download/api/v1/get?type=http').text.split()
        https_ips = requests.get('https://www.proxy-list.download/api/v1/get?type=https').text.split()
        for proxy in http_ips + https_ips:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy18():  # name must not clash with an existing fetcher
        """geonode proxy-list API (https/socks4/socks5, freshest first)."""
        json_results = requests.get('https://proxylist.geonode.com/api/proxy-list?limit=4000&page=1&sort_by=lastChecked&sort_type=desc&protocols=https%2Csocks4%2Csocks5').json()
        ip_list = [s['ip'] + ':' + s['port'] for s in json_results['data']]
        for proxy in ip_list:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy19():  # name must not clash with an existing fetcher
        """proxylistplus pages 1-6, parsed from the HTML tables with pandas."""
        ip_list = []
        for pg_num in range(1, 7):
            df_ips = pd.read_html(requests.get('https://list.proxylistplus.com/Fresh-HTTP-Proxy-List-' + str(pg_num)).text)[2]
            ip_list += [s[0] + ':' + str(s[1]) for s in df_ips[['IP Address.1', 'Port']].values]
        for proxy in ip_list:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy20():  # name must not clash with an existing fetcher
        """proxyscrape API (all proxy types)."""
        proxy_list = requests.get('https://api.proxyscrape.com/?request=displayproxies&proxytype=all').text.split()
        for proxy in proxy_list:
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy21():  # name must not clash with an existing fetcher
        """proxyscan.io downloads (https, socks4, socks5), de-duplicated."""
        proxy_list = requests.get('https://www.proxyscan.io/download?type=https').text.split()
        proxy_list += requests.get('https://www.proxyscan.io/download?type=socks4').text.split()
        proxy_list += requests.get('https://www.proxyscan.io/download?type=socks5').text.split()
        for proxy in set(proxy_list):
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy22():  # name must not clash with an existing fetcher
        """free-proxy-list.net family of tables, parsed with pandas.

        Rows whose IP column is NaN are skipped; the final set() removes
        duplicates (the original fetches sslproxies.org twice - kept as-is,
        the set dedupes the overlap anyway).
        """
        proxy_list = [s[0] + ':' + str(int(s[1])) for s in
                      pd.read_html(requests.get('https://www.socks-proxy.net/').text)[0][['IP Address', 'Port']].values
                      if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://www.sslproxies.org/').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://free-proxy-list.net/').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://www.us-proxy.org/').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://free-proxy-list.net/uk-proxy.html').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://www.sslproxies.org/').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        proxy_list += [s[0] + ':' + str(int(s[1])) for s in
                       pd.read_html(requests.get('https://free-proxy-list.net/anonymous-proxy.html').text)[0][['IP Address', 'Port']].values
                       if str(s[0]) != 'nan']
        for proxy in set(proxy_list):
            yield proxy
        # every proxy must be yielded in valid host:port form

    @staticmethod
    def freeProxy23():  # name must not clash with an existing fetcher
        """Read proxies from a local slow_rotate.txt next to this module."""
        # proxy_list=requests.get('https://raw.githubusercontent.com/ma-ji/proxy_pool/master/fetcher/slow_rotate.txt').text.split()
        dirname = os.path.dirname(os.path.abspath(__file__))
        fn = os.path.join(dirname, "slow_rotate.txt")
        with open(fn) as f:
            proxy_list = f.read().split()
        for proxy in set(proxy_list):
            yield proxy
        # every proxy must be yielded in valid host:port form
if __name__ == '__main__':
    # Manual smoke test: print everything the 89ip fetcher currently yields.
    p = ProxyFetcher()
    for _ in p.freeProxy13():
        print(_)
| 37.285333 | 175 | 0.505293 | 13,891 | 0.95668 | 13,168 | 0.906887 | 13,600 | 0.936639 | 0 | 0 | 5,449 | 0.375275 |
a62e304e0b732c8f5f93c43c8f46be4516b03ad1 | 5,064 | py | Python | python/toy/weather_wechat.py | tagwan/scripts | f88846f13b1e3d05c093aff9124d927d6873280c | [
"MIT"
] | null | null | null | python/toy/weather_wechat.py | tagwan/scripts | f88846f13b1e3d05c093aff9124d927d6873280c | [
"MIT"
] | null | null | null | python/toy/weather_wechat.py | tagwan/scripts | f88846f13b1e3d05c093aff9124d927d6873280c | [
"MIT"
] | null | null | null | import requests
import json
import datetime
def weather(city):
    """Fetch today's weather for *city* from the wthrcdn.etouch.cn mini API.

    :param city: city name in Chinese, e.g. "大兴"
    :return: on success a dict ``{"source_data": <raw API 'data' payload>,
        "res": <formatted Chinese summary text>}``; on any failure the
        exception text as a plain string.
        NOTE(review): the inconsistent success/failure return types force
        callers to type-check the result, and requests.get has no timeout.
    """
    url = "http://wthrcdn.etouch.cn/weather_mini?city=%s" % city
    try:
        data = requests.get(url).json()['data']
        city = data['city']
        ganmao = data['ganmao']
        today_weather = data['forecast'][0]  # first entry is today
        # Build the summary; {:<10} left-aligns each field for readability.
        res = "老婆今天是{}\n今天天气概况\n城市: {:<10}\n时间: {:<10}\n高温: {:<10}\n低温: {:<10}\n风力: {:<10}\n风向: {:<10}\n天气: {:<10}\n\n稍后会发送近期温度趋势图,请注意查看。\
".format(
            ganmao,
            city,
            datetime.datetime.now().strftime('%Y-%m-%d'),
            today_weather['high'].split()[1],
            today_weather['low'].split()[1],
            today_weather['fengli'].split('[')[2].split(']')[0],
            today_weather['fengxiang'], today_weather['type'],
        )
        return {"source_data": data, "res": res}
    except Exception as e:
        return str(e)
"""
获取天气预报趋势图
"""
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import re
import datetime
def Future_weather_states(forecast, save_path, day_num=5):
    """Plot the coming days' high/low temperature trend and save it as an image.

    :param forecast: list of per-day dicts carrying "date", "high", "low"
        and "type" keys (the "forecast" payload returned by the weather API)
    :param save_path: image file path handed to plt.savefig
    :param day_num: number of leading days of *forecast* to plot
    """
    future_forecast = forecast
    # Fix: renamed from ``dict``, which shadowed the builtin.
    by_day = {}  # day-of-month -> [high, low, weather type]
    for i in range(day_num):
        data = []
        # Fix: raw strings - "\d" is an invalid escape in a plain literal.
        day = future_forecast[i]["date"]
        day = int(re.findall(r"\d+", day)[0])
        data.append(int(re.findall(r"\d+", future_forecast[i]["high"])[0]))
        data.append(int(re.findall(r"\d+", future_forecast[i]["low"])[0]))
        data.append(future_forecast[i]["type"])
        by_day[day] = data
    data_list = sorted(by_day.items())  # chronological order by day-of-month
    date = []
    high_temperature = []
    low_temperature = []
    for each in data_list:
        date.append(each[0])
        high_temperature.append(each[1][0])
        low_temperature.append(each[1][1])
    # Fix: dropped the unused ``fig =`` binding.
    plt.plot(date, high_temperature, "r", date, low_temperature, "b")
    current_date = datetime.datetime.now().strftime('%Y-%m')
    plt.rcParams['font.sans-serif'] = ['SimHei']  # CJK-capable font for the labels
    plt.rcParams['axes.unicode_minus'] = False
    plt.xlabel(current_date)
    plt.ylabel("℃")
    plt.legend(["高温", "低温"])
    plt.xticks(date)
    plt.title("最近几天温度变化趋势")
    plt.savefig(save_path)
"""
发送到企业微信
"""
# -*- coding: utf-8 -*-
import requests
import json
class DLF:
    """Minimal WeCom (enterprise WeChat) message sender.

    Wraps the qyapi.weixin.qq.com HTTP API: fetches an access token on
    construction and pushes text or image messages to users/departments.
    NOTE(review): every method returns ``str(e)`` on failure instead of
    raising, so callers cannot reliably distinguish errors from success.
    """

    def __init__(self, corpid, corpsecret):
        # Base endpoint of the WeCom API.
        self.url = "https://qyapi.weixin.qq.com/cgi-bin"
        self.corpid = corpid
        self.corpsecret = corpsecret
        # Token is fetched once at construction; there is no refresh logic.
        self._token = self._get_token()

    def _get_token(self):
        '''
        Fetch an access_token for the WeCom API.
        :return: the token string, or str(exception) on failure
        '''
        token_url = self.url + "/gettoken?corpid=%s&corpsecret=%s" % (self.corpid, self.corpsecret)
        try:
            res = requests.get(token_url).json()
            token = res['access_token']
            return token
        except Exception as e:
            return str(e)

    def _get_media_id(self, file_obj):
        # Upload a file and return the media_id WeCom assigns to it; an
        # image message must reference an uploaded media_id.
        get_media_url = self.url + "/media/upload?access_token={}&type=file".format(self._token)
        data = {"media": file_obj}
        try:
            res = requests.post(url=get_media_url, files=data)
            media_id = res.json()['media_id']
            return media_id
        except Exception as e:
            return str(e)

    def send_text(self, agentid, content, touser=None, toparty=None):
        # Send a plain-text message via the given agent to a user and/or
        # department (at least one of touser/toparty should be set).
        send_msg_url = self.url + "/message/send?access_token=%s" % (self._token)
        send_data = {
            "touser": touser,
            "toparty": toparty,
            "msgtype": "text",
            "agentid": agentid,
            "text": {
                "content": content
            }
        }
        try:
            res = requests.post(send_msg_url, data=json.dumps(send_data))
        except Exception as e:
            return str(e)

    def send_image(self, agentid, file_obj, touser=None, toparty=None):
        # Upload *file_obj* first, then send it as an image message.
        media_id = self._get_media_id(file_obj)
        send_msg_url = self.url + "/message/send?access_token=%s" % (self._token)
        send_data = {
            "touser": touser,
            "toparty": toparty,
            "msgtype": "image",
            "agentid": agentid,
            "image": {
                "media_id": media_id
            }
        }
        try:
            res = requests.post(send_msg_url, data=json.dumps(send_data))
        except Exception as e:
            return str(e)
"""
main脚本
"""
# -*- coding: utf-8 -*-
from plugins.weather_forecast import weather
from plugins.trend_chart import Future_weather_states
from plugins.send_wechat import DLF
import os

# WeCom (enterprise WeChat) credentials - replace before running.
corpid = "xxx"
corpsecret = "xxx"
agentid = "xxx"
# Path where the temperature-trend chart image is written.
_path = os.path.dirname(os.path.abspath(__file__))
save_path = os.path.join(_path, './tmp/weather_forecast.jpg')
# Fetch today's forecast (city hard-coded to "大兴").
content = weather("大兴")
# Push the text summary to department '1'.
dlf = DLF(corpid, corpsecret)
dlf.send_text(agentid=agentid, content=content['res'], toparty='1')
# Render the trend chart for the coming days.
Future_weather_states(content['source_data']['forecast'], save_path)
# Push the chart as an image message.
# NOTE(review): file_obj is never closed - consider `with open(...)`.
file_obj = open(save_path, 'rb')
dlf.send_image(agentid=agentid, toparty='1', file_obj=file_obj)
| 26.652632 | 138 | 0.582346 | 2,106 | 0.390579 | 0 | 0 | 0 | 0 | 0 | 0 | 1,479 | 0.274295 |
a62e40b59d5193d55d8c169993defb3ea0af6a2d | 161,186 | py | Python | tests/test_drive_sample.py | chyroc/pylark | a54cce6b814935fd3c72668b262b54c8ee461484 | [
"Apache-2.0"
] | 7 | 2021-08-18T00:42:05.000Z | 2022-03-14T09:49:15.000Z | tests/test_drive_sample.py | chyroc/pylark | a54cce6b814935fd3c72668b262b54c8ee461484 | [
"Apache-2.0"
] | null | null | null | tests/test_drive_sample.py | chyroc/pylark | a54cce6b814935fd3c72668b262b54c8ee461484 | [
"Apache-2.0"
] | 1 | 2022-03-14T09:49:20.000Z | 2022-03-14T09:49:20.000Z | # Code generated by lark_sdk_gen. DO NOT EDIT.
import unittest
import pylark
import pytest
from tests.test_conf import app_all_permission, app_no_permission
from tests.test_helper import mock_get_tenant_access_token_failed
def mock(*args, **kwargs):
raise pylark.PyLarkError(scope="scope", func="func", code=1, msg="mock-failed")
def mock_raw_request(*args, **kwargs):
raise pylark.PyLarkError(
scope="scope", func="func", code=1, msg="mock-raw-request-failed"
)
# mock get token
class TestDriveSampleMockGetTokenFailed(unittest.TestCase):
    """Generated failure-path tests for the Drive module when token retrieval fails.

    Both token getters on the client are replaced with
    ``mock_get_tenant_access_token_failed``, so every Drive API call is expected
    to raise ``pylark.PyLarkError`` before any real request is made. Each
    ``test_mock_get_token_*`` method follows the same generated pattern: invoke
    one Drive endpoint with an empty request object and assert that the raised
    error's text contains ``msg=failed``.
    """
    def __init__(self, *args, **kwargs):
        """Build the all-permission client and swap in failing token getters."""
        super(TestDriveSampleMockGetTokenFailed, self).__init__(*args, **kwargs)
        self.cli = app_all_permission.ins()
        # Both tenant and app token getters raise, so every call below fails
        # during authentication, regardless of which endpoint is exercised.
        self.cli.auth.get_tenant_access_token = mock_get_tenant_access_token_failed
        self.cli.auth.get_app_access_token = mock_get_tenant_access_token_failed
        self.module_cli = self.cli.drive
    # NOTE: the methods below are code-generated — one per Drive endpoint,
    # all identical except for the method and request-class names.
    def test_mock_get_token_get_drive_file_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_file(pylark.CreateDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_copy_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.copy_drive_file(pylark.CopyDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_file(pylark.DeleteDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_sheet_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_sheet_file(pylark.DeleteDriveSheetFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_folder(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_folder(pylark.CreateDriveFolderReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_folder_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_meta(pylark.GetDriveFolderMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_root_folder_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_root_folder_meta(
                pylark.GetDriveRootFolderMetaReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_folder_children(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_folder_children(
                pylark.GetDriveFolderChildrenReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_file_statistics(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_file_statistics(
                pylark.GetDriveFileStatisticsReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_download_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_file(pylark.DownloadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepare_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_file(
                pylark.PrepareUploadDriveFileReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_part_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_finish_upload_drive_file(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_download_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.download_drive_media(pylark.DownloadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepare_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepare_upload_drive_media(
                pylark.PrepareUploadDriveMediaReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_part_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_finish_upload_drive_media(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.finish_upload_drive_media(
                pylark.FinishUploadDriveMediaReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission_old(
                pylark.CreateDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_transfer_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.transfer_drive_member_permission(
                pylark.TransferDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_member_permission_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_member_permission_list(
                pylark.GetDriveMemberPermissionListReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_member_permission(
                pylark.CreateDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission_old(
                pylark.DeleteDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_member_permission(
                pylark.DeleteDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_member_permission_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission_old(
                pylark.UpdateDriveMemberPermissionOldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_member_permission(
                pylark.UpdateDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_check_drive_member_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.check_drive_member_permission(
                pylark.CheckDriveMemberPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission_v1_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v1_old(
                pylark.UpdateDrivePublicPermissionV1OldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission_v2_old(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission_v2_old(
                pylark.UpdateDrivePublicPermissionV2OldReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_public_permission_v2(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_public_permission_v2(
                pylark.GetDrivePublicPermissionV2Req()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_public_permission(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_public_permission(
                pylark.UpdateDrivePublicPermissionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_get_drive_media_tmp_download_url(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_drive_media_tmp_download_url(
                pylark.BatchGetDriveMediaTmpDownloadURLReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_comment_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment_list(pylark.GetDriveCommentListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_comment(pylark.GetDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_comment(pylark.CreateDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment(pylark.UpdateDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_drive_comment(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_drive_comment(pylark.DeleteDriveCommentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_drive_comment_patch(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_drive_comment_patch(
                pylark.UpdateDriveCommentPatchReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_doc(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_content(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_content(pylark.GetDriveDocContentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_raw_content(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_raw_content(pylark.GetDriveDocRawContentReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_doc_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_doc_meta(pylark.GetDriveDocMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet(pylark.CreateSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_meta(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_meta(pylark.GetSheetMetaReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_property(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_property(pylark.UpdateSheetPropertyReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_update_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_update_sheet(pylark.BatchUpdateSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_import_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.import_sheet(pylark.ImportSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_drive_import_task(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_drive_import_task(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_drive_import_task(pylark.GetDriveImportTaskReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_move_sheet_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.move_sheet_dimension(pylark.MoveSheetDimensionReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_prepend_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.prepend_sheet_value(pylark.PrependSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_append_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.append_sheet_value(pylark.AppendSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_insert_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.insert_sheet_dimension_range(
                pylark.InsertSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_add_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.add_sheet_dimension_range(
                pylark.AddSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_dimension_range(
                pylark.UpdateSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_dimension_range(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_dimension_range(
                pylark.DeleteSheetDimensionRangeReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_value(pylark.GetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_get_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_get_sheet_value(pylark.BatchGetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value(pylark.SetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_set_sheet_value(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_value(pylark.BatchSetSheetValueReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_style(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_style(pylark.SetSheetStyleReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_batch_set_sheet_style(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.batch_set_sheet_style(pylark.BatchSetSheetStyleReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_merge_sheet_cell(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.merge_sheet_cell(pylark.MergeSheetCellReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_unmerge_sheet_cell(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.unmerge_sheet_cell(pylark.UnmergeSheetCellReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_set_sheet_value_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.set_sheet_value_image(pylark.SetSheetValueImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_find_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.find_sheet(pylark.FindSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_replace_sheet(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.replace_sheet(pylark.ReplaceSheetReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_condition_format(
                pylark.CreateSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_condition_format(
                pylark.GetSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_condition_format(
                pylark.UpdateSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_condition_format(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_condition_format(
                pylark.DeleteSheetConditionFormatReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_protected_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_protected_dimension(
                pylark.CreateSheetProtectedDimensionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_protected_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_protected_dimension(
                pylark.GetSheetProtectedDimensionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_protected_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_protected_dimension(
                pylark.UpdateSheetProtectedDimensionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_protected_dimension(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_protected_dimension(
                pylark.DeleteSheetProtectedDimensionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_data_validation_dropdown(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_data_validation_dropdown(
                pylark.CreateSheetDataValidationDropdownReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_data_validation_dropdown(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_data_validation_dropdown(
                pylark.DeleteSheetDataValidationDropdownReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_data_validation_dropdown(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_data_validation_dropdown(
                pylark.UpdateSheetDataValidationDropdownReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_data_validation_dropdown(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_data_validation_dropdown(
                pylark.GetSheetDataValidationDropdownReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_filter(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_filter(pylark.CreateSheetFilterReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_filter(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_filter(pylark.DeleteSheetFilterReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_filter(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_filter(pylark.UpdateSheetFilterReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_filter(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_filter(pylark.GetSheetFilterReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_filter_view(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_filter_view(pylark.CreateSheetFilterViewReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_filter_view(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_filter_view(pylark.DeleteSheetFilterViewReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_filter_view(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_filter_view(pylark.UpdateSheetFilterViewReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_filter_view(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_filter_view(pylark.GetSheetFilterViewReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_query_sheet_filter_view(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.query_sheet_filter_view(pylark.QuerySheetFilterViewReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_filter_view_condition(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_filter_view_condition(
                pylark.CreateSheetFilterViewConditionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_filter_view_condition(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_filter_view_condition(
                pylark.DeleteSheetFilterViewConditionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_filter_view_condition(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_filter_view_condition(
                pylark.UpdateSheetFilterViewConditionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_filter_view_condition(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_filter_view_condition(
                pylark.GetSheetFilterViewConditionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_query_sheet_filter_view_condition(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.query_sheet_filter_view_condition(
                pylark.QuerySheetFilterViewConditionReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_sheet_float_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_sheet_float_image(pylark.CreateSheetFloatImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_delete_sheet_float_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.delete_sheet_float_image(pylark.DeleteSheetFloatImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_sheet_float_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_sheet_float_image(pylark.UpdateSheetFloatImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_sheet_float_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_sheet_float_image(pylark.GetSheetFloatImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_query_sheet_float_image(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.query_sheet_float_image(pylark.QuerySheetFloatImageReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_wiki_space_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_wiki_space(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_wiki_space(pylark.GetWikiSpaceReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_update_wiki_space_setting(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.update_wiki_space_setting(
                pylark.UpdateWikiSpaceSettingReq()
            )
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_add_wiki_space_member(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.add_wiki_space_member(pylark.AddWikiSpaceMemberReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_create_wiki_node(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.create_wiki_node(pylark.CreateWikiNodeReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_wiki_node_list(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_wiki_node_list(pylark.GetWikiNodeListReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_get_wiki_node(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
        assert "msg=failed" in f"{e}"
    def test_mock_get_token_move_docs_to_wiki(self):
        with pytest.raises(pylark.PyLarkError) as e:
            self.module_cli.move_docs_to_wiki(pylark.MoveDocsToWikiReq())
        assert "msg=failed" in f"{e}"
# mock self func
class TestDriveSampleMockSelfFuncFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestDriveSampleMockSelfFuncFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.drive
def test_mock_self_func_get_drive_file_meta(self):
origin_func = self.module_cli.get_drive_file_meta
self.module_cli.get_drive_file_meta = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_file_meta = origin_func
def test_mock_self_func_create_drive_file(self):
origin_func = self.module_cli.create_drive_file
self.module_cli.create_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_file(pylark.CreateDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_file = origin_func
def test_mock_self_func_copy_drive_file(self):
origin_func = self.module_cli.copy_drive_file
self.module_cli.copy_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.copy_drive_file(pylark.CopyDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.copy_drive_file = origin_func
def test_mock_self_func_delete_drive_file(self):
origin_func = self.module_cli.delete_drive_file
self.module_cli.delete_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_file(pylark.DeleteDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_drive_file = origin_func
def test_mock_self_func_delete_drive_sheet_file(self):
origin_func = self.module_cli.delete_drive_sheet_file
self.module_cli.delete_drive_sheet_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_sheet_file(pylark.DeleteDriveSheetFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_drive_sheet_file = origin_func
def test_mock_self_func_create_drive_folder(self):
origin_func = self.module_cli.create_drive_folder
self.module_cli.create_drive_folder = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_folder(pylark.CreateDriveFolderReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_folder = origin_func
def test_mock_self_func_get_drive_folder_meta(self):
origin_func = self.module_cli.get_drive_folder_meta
self.module_cli.get_drive_folder_meta = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_meta(pylark.GetDriveFolderMetaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_folder_meta = origin_func
def test_mock_self_func_get_drive_root_folder_meta(self):
origin_func = self.module_cli.get_drive_root_folder_meta
self.module_cli.get_drive_root_folder_meta = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_root_folder_meta(
pylark.GetDriveRootFolderMetaReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_root_folder_meta = origin_func
def test_mock_self_func_get_drive_folder_children(self):
origin_func = self.module_cli.get_drive_folder_children
self.module_cli.get_drive_folder_children = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_children(
pylark.GetDriveFolderChildrenReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_folder_children = origin_func
def test_mock_self_func_get_drive_file_statistics(self):
origin_func = self.module_cli.get_drive_file_statistics
self.module_cli.get_drive_file_statistics = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_statistics(
pylark.GetDriveFileStatisticsReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_file_statistics = origin_func
def test_mock_self_func_download_drive_file(self):
origin_func = self.module_cli.download_drive_file
self.module_cli.download_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_file(pylark.DownloadDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.download_drive_file = origin_func
def test_mock_self_func_upload_drive_file(self):
origin_func = self.module_cli.upload_drive_file
self.module_cli.upload_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.upload_drive_file = origin_func
def test_mock_self_func_prepare_upload_drive_file(self):
origin_func = self.module_cli.prepare_upload_drive_file
self.module_cli.prepare_upload_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_file(
pylark.PrepareUploadDriveFileReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.prepare_upload_drive_file = origin_func
def test_mock_self_func_part_upload_drive_file(self):
origin_func = self.module_cli.part_upload_drive_file
self.module_cli.part_upload_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.part_upload_drive_file = origin_func
def test_mock_self_func_finish_upload_drive_file(self):
origin_func = self.module_cli.finish_upload_drive_file
self.module_cli.finish_upload_drive_file = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.finish_upload_drive_file = origin_func
def test_mock_self_func_download_drive_media(self):
origin_func = self.module_cli.download_drive_media
self.module_cli.download_drive_media = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_media(pylark.DownloadDriveMediaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.download_drive_media = origin_func
def test_mock_self_func_upload_drive_media(self):
origin_func = self.module_cli.upload_drive_media
self.module_cli.upload_drive_media = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.upload_drive_media = origin_func
def test_mock_self_func_prepare_upload_drive_media(self):
origin_func = self.module_cli.prepare_upload_drive_media
self.module_cli.prepare_upload_drive_media = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_media(
pylark.PrepareUploadDriveMediaReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.prepare_upload_drive_media = origin_func
def test_mock_self_func_part_upload_drive_media(self):
origin_func = self.module_cli.part_upload_drive_media
self.module_cli.part_upload_drive_media = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.part_upload_drive_media = origin_func
def test_mock_self_func_finish_upload_drive_media(self):
origin_func = self.module_cli.finish_upload_drive_media
self.module_cli.finish_upload_drive_media = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_media(
pylark.FinishUploadDriveMediaReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.finish_upload_drive_media = origin_func
def test_mock_self_func_create_drive_member_permission_old(self):
origin_func = self.module_cli.create_drive_member_permission_old
self.module_cli.create_drive_member_permission_old = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission_old(
pylark.CreateDriveMemberPermissionOldReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_member_permission_old = origin_func
def test_mock_self_func_transfer_drive_member_permission(self):
origin_func = self.module_cli.transfer_drive_member_permission
self.module_cli.transfer_drive_member_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.transfer_drive_member_permission(
pylark.TransferDriveMemberPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.transfer_drive_member_permission = origin_func
def test_mock_self_func_get_drive_member_permission_list(self):
origin_func = self.module_cli.get_drive_member_permission_list
self.module_cli.get_drive_member_permission_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_member_permission_list(
pylark.GetDriveMemberPermissionListReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_member_permission_list = origin_func
def test_mock_self_func_create_drive_member_permission(self):
origin_func = self.module_cli.create_drive_member_permission
self.module_cli.create_drive_member_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission(
pylark.CreateDriveMemberPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_member_permission = origin_func
def test_mock_self_func_delete_drive_member_permission_old(self):
origin_func = self.module_cli.delete_drive_member_permission_old
self.module_cli.delete_drive_member_permission_old = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission_old(
pylark.DeleteDriveMemberPermissionOldReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_drive_member_permission_old = origin_func
def test_mock_self_func_delete_drive_member_permission(self):
origin_func = self.module_cli.delete_drive_member_permission
self.module_cli.delete_drive_member_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission(
pylark.DeleteDriveMemberPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_drive_member_permission = origin_func
def test_mock_self_func_update_drive_member_permission_old(self):
origin_func = self.module_cli.update_drive_member_permission_old
self.module_cli.update_drive_member_permission_old = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission_old(
pylark.UpdateDriveMemberPermissionOldReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_member_permission_old = origin_func
def test_mock_self_func_update_drive_member_permission(self):
origin_func = self.module_cli.update_drive_member_permission
self.module_cli.update_drive_member_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission(
pylark.UpdateDriveMemberPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_member_permission = origin_func
def test_mock_self_func_check_drive_member_permission(self):
origin_func = self.module_cli.check_drive_member_permission
self.module_cli.check_drive_member_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.check_drive_member_permission(
pylark.CheckDriveMemberPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.check_drive_member_permission = origin_func
def test_mock_self_func_update_drive_public_permission_v1_old(self):
origin_func = self.module_cli.update_drive_public_permission_v1_old
self.module_cli.update_drive_public_permission_v1_old = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v1_old(
pylark.UpdateDrivePublicPermissionV1OldReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_public_permission_v1_old = origin_func
def test_mock_self_func_update_drive_public_permission_v2_old(self):
origin_func = self.module_cli.update_drive_public_permission_v2_old
self.module_cli.update_drive_public_permission_v2_old = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v2_old(
pylark.UpdateDrivePublicPermissionV2OldReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_public_permission_v2_old = origin_func
def test_mock_self_func_get_drive_public_permission_v2(self):
origin_func = self.module_cli.get_drive_public_permission_v2
self.module_cli.get_drive_public_permission_v2 = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_public_permission_v2(
pylark.GetDrivePublicPermissionV2Req()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_public_permission_v2 = origin_func
def test_mock_self_func_update_drive_public_permission(self):
origin_func = self.module_cli.update_drive_public_permission
self.module_cli.update_drive_public_permission = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission(
pylark.UpdateDrivePublicPermissionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_public_permission = origin_func
def test_mock_self_func_batch_get_drive_media_tmp_download_url(self):
origin_func = self.module_cli.batch_get_drive_media_tmp_download_url
self.module_cli.batch_get_drive_media_tmp_download_url = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_drive_media_tmp_download_url(
pylark.BatchGetDriveMediaTmpDownloadURLReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_get_drive_media_tmp_download_url = origin_func
def test_mock_self_func_get_drive_comment_list(self):
origin_func = self.module_cli.get_drive_comment_list
self.module_cli.get_drive_comment_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment_list(pylark.GetDriveCommentListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_comment_list = origin_func
def test_mock_self_func_get_drive_comment(self):
origin_func = self.module_cli.get_drive_comment
self.module_cli.get_drive_comment = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment(pylark.GetDriveCommentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_comment = origin_func
def test_mock_self_func_create_drive_comment(self):
origin_func = self.module_cli.create_drive_comment
self.module_cli.create_drive_comment = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_comment(pylark.CreateDriveCommentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_comment = origin_func
def test_mock_self_func_update_drive_comment(self):
origin_func = self.module_cli.update_drive_comment
self.module_cli.update_drive_comment = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment(pylark.UpdateDriveCommentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_comment = origin_func
def test_mock_self_func_delete_drive_comment(self):
origin_func = self.module_cli.delete_drive_comment
self.module_cli.delete_drive_comment = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_comment(pylark.DeleteDriveCommentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_drive_comment = origin_func
def test_mock_self_func_update_drive_comment_patch(self):
origin_func = self.module_cli.update_drive_comment_patch
self.module_cli.update_drive_comment_patch = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment_patch(
pylark.UpdateDriveCommentPatchReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_drive_comment_patch = origin_func
def test_mock_self_func_create_drive_doc(self):
origin_func = self.module_cli.create_drive_doc
self.module_cli.create_drive_doc = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_doc = origin_func
def test_mock_self_func_get_drive_doc_content(self):
origin_func = self.module_cli.get_drive_doc_content
self.module_cli.get_drive_doc_content = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_content(pylark.GetDriveDocContentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_doc_content = origin_func
def test_mock_self_func_get_drive_doc_raw_content(self):
origin_func = self.module_cli.get_drive_doc_raw_content
self.module_cli.get_drive_doc_raw_content = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_raw_content(pylark.GetDriveDocRawContentReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_doc_raw_content = origin_func
def test_mock_self_func_get_drive_doc_meta(self):
origin_func = self.module_cli.get_drive_doc_meta
self.module_cli.get_drive_doc_meta = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_meta(pylark.GetDriveDocMetaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_doc_meta = origin_func
def test_mock_self_func_create_sheet(self):
origin_func = self.module_cli.create_sheet
self.module_cli.create_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet(pylark.CreateSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet = origin_func
def test_mock_self_func_get_sheet_meta(self):
origin_func = self.module_cli.get_sheet_meta
self.module_cli.get_sheet_meta = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_meta(pylark.GetSheetMetaReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_meta = origin_func
def test_mock_self_func_update_sheet_property(self):
origin_func = self.module_cli.update_sheet_property
self.module_cli.update_sheet_property = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_property(pylark.UpdateSheetPropertyReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_property = origin_func
def test_mock_self_func_batch_update_sheet(self):
origin_func = self.module_cli.batch_update_sheet
self.module_cli.batch_update_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_update_sheet(pylark.BatchUpdateSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_update_sheet = origin_func
def test_mock_self_func_import_sheet(self):
origin_func = self.module_cli.import_sheet
self.module_cli.import_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.import_sheet(pylark.ImportSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.import_sheet = origin_func
def test_mock_self_func_create_drive_import_task(self):
origin_func = self.module_cli.create_drive_import_task
self.module_cli.create_drive_import_task = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_drive_import_task = origin_func
def test_mock_self_func_get_drive_import_task(self):
origin_func = self.module_cli.get_drive_import_task
self.module_cli.get_drive_import_task = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_import_task(pylark.GetDriveImportTaskReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_drive_import_task = origin_func
def test_mock_self_func_move_sheet_dimension(self):
origin_func = self.module_cli.move_sheet_dimension
self.module_cli.move_sheet_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_sheet_dimension(pylark.MoveSheetDimensionReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.move_sheet_dimension = origin_func
def test_mock_self_func_prepend_sheet_value(self):
origin_func = self.module_cli.prepend_sheet_value
self.module_cli.prepend_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepend_sheet_value(pylark.PrependSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.prepend_sheet_value = origin_func
def test_mock_self_func_append_sheet_value(self):
origin_func = self.module_cli.append_sheet_value
self.module_cli.append_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.append_sheet_value(pylark.AppendSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.append_sheet_value = origin_func
def test_mock_self_func_insert_sheet_dimension_range(self):
origin_func = self.module_cli.insert_sheet_dimension_range
self.module_cli.insert_sheet_dimension_range = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.insert_sheet_dimension_range(
pylark.InsertSheetDimensionRangeReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.insert_sheet_dimension_range = origin_func
def test_mock_self_func_add_sheet_dimension_range(self):
origin_func = self.module_cli.add_sheet_dimension_range
self.module_cli.add_sheet_dimension_range = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_sheet_dimension_range(
pylark.AddSheetDimensionRangeReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.add_sheet_dimension_range = origin_func
def test_mock_self_func_update_sheet_dimension_range(self):
origin_func = self.module_cli.update_sheet_dimension_range
self.module_cli.update_sheet_dimension_range = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_dimension_range(
pylark.UpdateSheetDimensionRangeReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_dimension_range = origin_func
def test_mock_self_func_delete_sheet_dimension_range(self):
origin_func = self.module_cli.delete_sheet_dimension_range
self.module_cli.delete_sheet_dimension_range = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_dimension_range(
pylark.DeleteSheetDimensionRangeReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_dimension_range = origin_func
def test_mock_self_func_get_sheet_value(self):
origin_func = self.module_cli.get_sheet_value
self.module_cli.get_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_value(pylark.GetSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_value = origin_func
def test_mock_self_func_batch_get_sheet_value(self):
origin_func = self.module_cli.batch_get_sheet_value
self.module_cli.batch_get_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_sheet_value(pylark.BatchGetSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_get_sheet_value = origin_func
def test_mock_self_func_set_sheet_value(self):
origin_func = self.module_cli.set_sheet_value
self.module_cli.set_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value(pylark.SetSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.set_sheet_value = origin_func
def test_mock_self_func_batch_set_sheet_value(self):
origin_func = self.module_cli.batch_set_sheet_value
self.module_cli.batch_set_sheet_value = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_value(pylark.BatchSetSheetValueReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_set_sheet_value = origin_func
def test_mock_self_func_set_sheet_style(self):
origin_func = self.module_cli.set_sheet_style
self.module_cli.set_sheet_style = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_style(pylark.SetSheetStyleReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.set_sheet_style = origin_func
def test_mock_self_func_batch_set_sheet_style(self):
origin_func = self.module_cli.batch_set_sheet_style
self.module_cli.batch_set_sheet_style = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_style(pylark.BatchSetSheetStyleReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.batch_set_sheet_style = origin_func
def test_mock_self_func_merge_sheet_cell(self):
origin_func = self.module_cli.merge_sheet_cell
self.module_cli.merge_sheet_cell = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.merge_sheet_cell(pylark.MergeSheetCellReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.merge_sheet_cell = origin_func
def test_mock_self_func_unmerge_sheet_cell(self):
origin_func = self.module_cli.unmerge_sheet_cell
self.module_cli.unmerge_sheet_cell = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.unmerge_sheet_cell(pylark.UnmergeSheetCellReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.unmerge_sheet_cell = origin_func
def test_mock_self_func_set_sheet_value_image(self):
origin_func = self.module_cli.set_sheet_value_image
self.module_cli.set_sheet_value_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value_image(pylark.SetSheetValueImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.set_sheet_value_image = origin_func
def test_mock_self_func_find_sheet(self):
origin_func = self.module_cli.find_sheet
self.module_cli.find_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.find_sheet(pylark.FindSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.find_sheet = origin_func
def test_mock_self_func_replace_sheet(self):
origin_func = self.module_cli.replace_sheet
self.module_cli.replace_sheet = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.replace_sheet(pylark.ReplaceSheetReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.replace_sheet = origin_func
def test_mock_self_func_create_sheet_condition_format(self):
origin_func = self.module_cli.create_sheet_condition_format
self.module_cli.create_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_condition_format(
pylark.CreateSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_condition_format = origin_func
def test_mock_self_func_get_sheet_condition_format(self):
origin_func = self.module_cli.get_sheet_condition_format
self.module_cli.get_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_condition_format(
pylark.GetSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_condition_format = origin_func
def test_mock_self_func_update_sheet_condition_format(self):
origin_func = self.module_cli.update_sheet_condition_format
self.module_cli.update_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_condition_format(
pylark.UpdateSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_condition_format = origin_func
def test_mock_self_func_delete_sheet_condition_format(self):
origin_func = self.module_cli.delete_sheet_condition_format
self.module_cli.delete_sheet_condition_format = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_condition_format(
pylark.DeleteSheetConditionFormatReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_condition_format = origin_func
def test_mock_self_func_create_sheet_protected_dimension(self):
origin_func = self.module_cli.create_sheet_protected_dimension
self.module_cli.create_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_protected_dimension(
pylark.CreateSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_protected_dimension = origin_func
def test_mock_self_func_get_sheet_protected_dimension(self):
origin_func = self.module_cli.get_sheet_protected_dimension
self.module_cli.get_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_protected_dimension = origin_func
def test_mock_self_func_update_sheet_protected_dimension(self):
origin_func = self.module_cli.update_sheet_protected_dimension
self.module_cli.update_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_protected_dimension = origin_func
def test_mock_self_func_delete_sheet_protected_dimension(self):
origin_func = self.module_cli.delete_sheet_protected_dimension
self.module_cli.delete_sheet_protected_dimension = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_protected_dimension = origin_func
def test_mock_self_func_create_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.create_sheet_data_validation_dropdown
self.module_cli.create_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_delete_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.delete_sheet_data_validation_dropdown
self.module_cli.delete_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_update_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.update_sheet_data_validation_dropdown
self.module_cli.update_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_get_sheet_data_validation_dropdown(self):
origin_func = self.module_cli.get_sheet_data_validation_dropdown
self.module_cli.get_sheet_data_validation_dropdown = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_data_validation_dropdown = origin_func
def test_mock_self_func_create_sheet_filter(self):
origin_func = self.module_cli.create_sheet_filter
self.module_cli.create_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(pylark.CreateSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter = origin_func
def test_mock_self_func_delete_sheet_filter(self):
origin_func = self.module_cli.delete_sheet_filter
self.module_cli.delete_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(pylark.DeleteSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter = origin_func
def test_mock_self_func_update_sheet_filter(self):
origin_func = self.module_cli.update_sheet_filter
self.module_cli.update_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(pylark.UpdateSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter = origin_func
def test_mock_self_func_get_sheet_filter(self):
origin_func = self.module_cli.get_sheet_filter
self.module_cli.get_sheet_filter = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(pylark.GetSheetFilterReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter = origin_func
def test_mock_self_func_create_sheet_filter_view(self):
origin_func = self.module_cli.create_sheet_filter_view
self.module_cli.create_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(pylark.CreateSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter_view = origin_func
def test_mock_self_func_delete_sheet_filter_view(self):
origin_func = self.module_cli.delete_sheet_filter_view
self.module_cli.delete_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(pylark.DeleteSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter_view = origin_func
def test_mock_self_func_update_sheet_filter_view(self):
origin_func = self.module_cli.update_sheet_filter_view
self.module_cli.update_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(pylark.UpdateSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter_view = origin_func
def test_mock_self_func_get_sheet_filter_view(self):
origin_func = self.module_cli.get_sheet_filter_view
self.module_cli.get_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(pylark.GetSheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter_view = origin_func
def test_mock_self_func_query_sheet_filter_view(self):
origin_func = self.module_cli.query_sheet_filter_view
self.module_cli.query_sheet_filter_view = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(pylark.QuerySheetFilterViewReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_filter_view = origin_func
def test_mock_self_func_create_sheet_filter_view_condition(self):
origin_func = self.module_cli.create_sheet_filter_view_condition
self.module_cli.create_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_filter_view_condition = origin_func
def test_mock_self_func_delete_sheet_filter_view_condition(self):
origin_func = self.module_cli.delete_sheet_filter_view_condition
self.module_cli.delete_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_filter_view_condition = origin_func
def test_mock_self_func_update_sheet_filter_view_condition(self):
origin_func = self.module_cli.update_sheet_filter_view_condition
self.module_cli.update_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_filter_view_condition = origin_func
def test_mock_self_func_get_sheet_filter_view_condition(self):
origin_func = self.module_cli.get_sheet_filter_view_condition
self.module_cli.get_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_filter_view_condition = origin_func
def test_mock_self_func_query_sheet_filter_view_condition(self):
origin_func = self.module_cli.query_sheet_filter_view_condition
self.module_cli.query_sheet_filter_view_condition = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_filter_view_condition = origin_func
def test_mock_self_func_create_sheet_float_image(self):
origin_func = self.module_cli.create_sheet_float_image
self.module_cli.create_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(pylark.CreateSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_sheet_float_image = origin_func
def test_mock_self_func_delete_sheet_float_image(self):
origin_func = self.module_cli.delete_sheet_float_image
self.module_cli.delete_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(pylark.DeleteSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.delete_sheet_float_image = origin_func
def test_mock_self_func_update_sheet_float_image(self):
origin_func = self.module_cli.update_sheet_float_image
self.module_cli.update_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(pylark.UpdateSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_sheet_float_image = origin_func
def test_mock_self_func_get_sheet_float_image(self):
origin_func = self.module_cli.get_sheet_float_image
self.module_cli.get_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(pylark.GetSheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_sheet_float_image = origin_func
def test_mock_self_func_query_sheet_float_image(self):
origin_func = self.module_cli.query_sheet_float_image
self.module_cli.query_sheet_float_image = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(pylark.QuerySheetFloatImageReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.query_sheet_float_image = origin_func
def test_mock_self_func_get_wiki_space_list(self):
origin_func = self.module_cli.get_wiki_space_list
self.module_cli.get_wiki_space_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_space_list = origin_func
def test_mock_self_func_get_wiki_space(self):
origin_func = self.module_cli.get_wiki_space
self.module_cli.get_wiki_space = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(pylark.GetWikiSpaceReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_space = origin_func
def test_mock_self_func_update_wiki_space_setting(self):
origin_func = self.module_cli.update_wiki_space_setting
self.module_cli.update_wiki_space_setting = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq()
)
assert "msg=mock-failed" in f"{e}"
self.module_cli.update_wiki_space_setting = origin_func
def test_mock_self_func_add_wiki_space_member(self):
origin_func = self.module_cli.add_wiki_space_member
self.module_cli.add_wiki_space_member = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(pylark.AddWikiSpaceMemberReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.add_wiki_space_member = origin_func
def test_mock_self_func_create_wiki_node(self):
origin_func = self.module_cli.create_wiki_node
self.module_cli.create_wiki_node = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(pylark.CreateWikiNodeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.create_wiki_node = origin_func
def test_mock_self_func_get_wiki_node_list(self):
origin_func = self.module_cli.get_wiki_node_list
self.module_cli.get_wiki_node_list = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(pylark.GetWikiNodeListReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_node_list = origin_func
def test_mock_self_func_get_wiki_node(self):
origin_func = self.module_cli.get_wiki_node
self.module_cli.get_wiki_node = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.get_wiki_node = origin_func
def test_mock_self_func_move_docs_to_wiki(self):
origin_func = self.module_cli.move_docs_to_wiki
self.module_cli.move_docs_to_wiki = mock
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(pylark.MoveDocsToWikiReq())
assert "msg=mock-failed" in f"{e}"
self.module_cli.move_docs_to_wiki = origin_func
# mock raw request
class TestDriveSampleMockRawRequestFailed(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestDriveSampleMockRawRequestFailed, self).__init__(*args, **kwargs)
self.cli = app_all_permission.ins()
self.module_cli = self.cli.drive
self.cli.raw_request = mock_raw_request
def test_mock_raw_request_get_drive_file_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_meta(pylark.GetDriveFileMetaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_file(
pylark.CreateDriveFileReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_copy_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.copy_drive_file(
pylark.CopyDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_file(
pylark.DeleteDriveFileReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_sheet_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_sheet_file(
pylark.DeleteDriveSheetFileReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_folder(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_folder(
pylark.CreateDriveFolderReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_meta(
pylark.GetDriveFolderMetaReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_root_folder_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_root_folder_meta(
pylark.GetDriveRootFolderMetaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_folder_children(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_folder_children(
pylark.GetDriveFolderChildrenReq(
folder_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_file_statistics(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_file_statistics(
pylark.GetDriveFileStatisticsReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_download_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_file(
pylark.DownloadDriveFileReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_file(pylark.UploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepare_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_file(
pylark.PrepareUploadDriveFileReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_part_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_file(pylark.PartUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_finish_upload_drive_file(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_file(pylark.FinishUploadDriveFileReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_download_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.download_drive_media(
pylark.DownloadDriveMediaReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.upload_drive_media(pylark.UploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepare_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepare_upload_drive_media(
pylark.PrepareUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_part_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.part_upload_drive_media(pylark.PartUploadDriveMediaReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_finish_upload_drive_media(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.finish_upload_drive_media(
pylark.FinishUploadDriveMediaReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission_old(
pylark.CreateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_transfer_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.transfer_drive_member_permission(
pylark.TransferDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_member_permission_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_member_permission_list(
pylark.GetDriveMemberPermissionListReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_member_permission(
pylark.CreateDriveMemberPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission_old(
pylark.DeleteDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_member_permission(
pylark.DeleteDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_member_permission_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission_old(
pylark.UpdateDriveMemberPermissionOldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_member_permission(
pylark.UpdateDriveMemberPermissionReq(
token="x",
member_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_check_drive_member_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.check_drive_member_permission(
pylark.CheckDriveMemberPermissionReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission_v1_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v1_old(
pylark.UpdateDrivePublicPermissionV1OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission_v2_old(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission_v2_old(
pylark.UpdateDrivePublicPermissionV2OldReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_public_permission_v2(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_public_permission_v2(
pylark.GetDrivePublicPermissionV2Req()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_public_permission(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_public_permission(
pylark.UpdateDrivePublicPermissionReq(
token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_get_drive_media_tmp_download_url(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_drive_media_tmp_download_url(
pylark.BatchGetDriveMediaTmpDownloadURLReq()
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_comment_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment_list(
pylark.GetDriveCommentListReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_comment(
pylark.GetDriveCommentReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_comment(
pylark.CreateDriveCommentReq(
file_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment(
pylark.UpdateDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_drive_comment(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_drive_comment(
pylark.DeleteDriveCommentReq(
file_token="x",
comment_id="x",
reply_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_drive_comment_patch(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_drive_comment_patch(
pylark.UpdateDriveCommentPatchReq(
file_token="x",
comment_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_doc(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_doc(pylark.CreateDriveDocReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_content(
pylark.GetDriveDocContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_raw_content(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_raw_content(
pylark.GetDriveDocRawContentReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_doc_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_doc_meta(
pylark.GetDriveDocMetaReq(
doc_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet(pylark.CreateSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_meta(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_meta(
pylark.GetSheetMetaReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_property(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_property(
pylark.UpdateSheetPropertyReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_update_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_update_sheet(
pylark.BatchUpdateSheetReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_import_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.import_sheet(pylark.ImportSheetReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_drive_import_task(pylark.CreateDriveImportTaskReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_drive_import_task(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_drive_import_task(
pylark.GetDriveImportTaskReq(
ticket="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_move_sheet_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_sheet_dimension(
pylark.MoveSheetDimensionReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_prepend_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.prepend_sheet_value(
pylark.PrependSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_append_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.append_sheet_value(
pylark.AppendSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_insert_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.insert_sheet_dimension_range(
pylark.InsertSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_add_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_sheet_dimension_range(
pylark.AddSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_dimension_range(
pylark.UpdateSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_dimension_range(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_dimension_range(
pylark.DeleteSheetDimensionRangeReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_value(
pylark.GetSheetValueReq(
spreadsheet_token="x",
range_="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_get_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_get_sheet_value(
pylark.BatchGetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value(
pylark.SetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_set_sheet_value(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_value(
pylark.BatchSetSheetValueReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_style(
pylark.SetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_batch_set_sheet_style(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.batch_set_sheet_style(
pylark.BatchSetSheetStyleReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_merge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.merge_sheet_cell(
pylark.MergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_unmerge_sheet_cell(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.unmerge_sheet_cell(
pylark.UnmergeSheetCellReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_set_sheet_value_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.set_sheet_value_image(
pylark.SetSheetValueImageReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_find_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.find_sheet(
pylark.FindSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_replace_sheet(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.replace_sheet(
pylark.ReplaceSheetReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_condition_format(
pylark.CreateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_condition_format(
pylark.GetSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_condition_format(
pylark.UpdateSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_condition_format(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_condition_format(
pylark.DeleteSheetConditionFormatReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_protected_dimension(
pylark.CreateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_protected_dimension(
pylark.GetSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_protected_dimension(
pylark.UpdateSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_protected_dimension(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_protected_dimension(
pylark.DeleteSheetProtectedDimensionReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_data_validation_dropdown(
pylark.CreateSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_data_validation_dropdown(
pylark.DeleteSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_data_validation_dropdown(
pylark.UpdateSheetDataValidationDropdownReq(
spreadsheet_token="x",
sheet_id="x",
data_validation_id=1,
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_data_validation_dropdown(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_data_validation_dropdown(
pylark.GetSheetDataValidationDropdownReq(
spreadsheet_token="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter(
pylark.CreateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter(
pylark.DeleteSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter(
pylark.UpdateSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter(
pylark.GetSheetFilterReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view(
pylark.CreateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view(
pylark.DeleteSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view(
pylark.UpdateSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view(
pylark.GetSheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_filter_view(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view(
pylark.QuerySheetFilterViewReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_filter_view_condition(
pylark.CreateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_filter_view_condition(
pylark.DeleteSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_filter_view_condition(
pylark.UpdateSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_filter_view_condition(
pylark.GetSheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
condition_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_filter_view_condition(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_filter_view_condition(
pylark.QuerySheetFilterViewConditionReq(
spreadsheet_token="x",
sheet_id="x",
filter_view_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_sheet_float_image(
pylark.CreateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_delete_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.delete_sheet_float_image(
pylark.DeleteSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_sheet_float_image(
pylark.UpdateSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_sheet_float_image(
pylark.GetSheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
float_image_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_query_sheet_float_image(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.query_sheet_float_image(
pylark.QuerySheetFloatImageReq(
spreadsheet_token="x",
sheet_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_space_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space_list(pylark.GetWikiSpaceListReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_space(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_space(
pylark.GetWikiSpaceReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_update_wiki_space_setting(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.update_wiki_space_setting(
pylark.UpdateWikiSpaceSettingReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_add_wiki_space_member(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.add_wiki_space_member(
pylark.AddWikiSpaceMemberReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_create_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.create_wiki_node(
pylark.CreateWikiNodeReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_node_list(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node_list(
pylark.GetWikiNodeListReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_get_wiki_node(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.get_wiki_node(pylark.GetWikiNodeReq())
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
def test_mock_raw_request_move_docs_to_wiki(self):
with pytest.raises(pylark.PyLarkError) as e:
self.module_cli.move_docs_to_wiki(
pylark.MoveDocsToWikiReq(
space_id="x",
)
)
assert e.type is pylark.PyLarkError
assert e.value.code > 0
assert "mock-raw-request-failed" in e.value.msg
# real request
class TestDriveSampleRealRequestFailed(unittest.TestCase):
    """Real-request failure tests for the drive module.

    Every call goes through the client returned by ``app_no_permission.ins()``
    (the name indicates a client lacking the required permissions — behavior
    confirmed by the assertions below): each API call must raise
    ``pylark.PyLarkError`` with a positive (non-success) error code.

    The ~100 generated test methods previously repeated the same four-line
    raises/assert boilerplate; that body is factored into
    ``_assert_no_permission`` so each test only states which API it calls
    and with which request. All public test method names are unchanged.
    """

    def __init__(self, *args, **kwargs):
        super(TestDriveSampleRealRequestFailed, self).__init__(*args, **kwargs)
        self.cli = app_no_permission.ins()
        self.module_cli = self.cli.drive

    def _assert_no_permission(self, api_call, request):
        """Invoke ``api_call(request)`` and assert it fails with PyLarkError.

        Shared assertion body for every test in this class: the call must
        raise ``pylark.PyLarkError`` whose ``code`` is positive.
        """
        with pytest.raises(pylark.PyLarkError) as e:
            api_call(request)
        assert e.type is pylark.PyLarkError
        assert e.value.code > 0

    def test_real_request_get_drive_file_meta(self):
        self._assert_no_permission(
            self.module_cli.get_drive_file_meta, pylark.GetDriveFileMetaReq()
        )

    def test_real_request_create_drive_file(self):
        self._assert_no_permission(
            self.module_cli.create_drive_file,
            pylark.CreateDriveFileReq(folder_token="x"),
        )

    def test_real_request_copy_drive_file(self):
        self._assert_no_permission(
            self.module_cli.copy_drive_file,
            pylark.CopyDriveFileReq(file_token="x"),
        )

    def test_real_request_delete_drive_file(self):
        self._assert_no_permission(
            self.module_cli.delete_drive_file,
            pylark.DeleteDriveFileReq(doc_token="x"),
        )

    def test_real_request_delete_drive_sheet_file(self):
        self._assert_no_permission(
            self.module_cli.delete_drive_sheet_file,
            pylark.DeleteDriveSheetFileReq(spreadsheet_token="x"),
        )

    def test_real_request_create_drive_folder(self):
        self._assert_no_permission(
            self.module_cli.create_drive_folder,
            pylark.CreateDriveFolderReq(folder_token="x"),
        )

    def test_real_request_get_drive_folder_meta(self):
        self._assert_no_permission(
            self.module_cli.get_drive_folder_meta,
            pylark.GetDriveFolderMetaReq(folder_token="x"),
        )

    def test_real_request_get_drive_root_folder_meta(self):
        self._assert_no_permission(
            self.module_cli.get_drive_root_folder_meta,
            pylark.GetDriveRootFolderMetaReq(),
        )

    def test_real_request_get_drive_folder_children(self):
        self._assert_no_permission(
            self.module_cli.get_drive_folder_children,
            pylark.GetDriveFolderChildrenReq(folder_token="x"),
        )

    def test_real_request_get_drive_file_statistics(self):
        self._assert_no_permission(
            self.module_cli.get_drive_file_statistics,
            pylark.GetDriveFileStatisticsReq(file_token="x"),
        )

    def test_real_request_download_drive_file(self):
        self._assert_no_permission(
            self.module_cli.download_drive_file,
            pylark.DownloadDriveFileReq(file_token="x"),
        )

    def test_real_request_upload_drive_file(self):
        self._assert_no_permission(
            self.module_cli.upload_drive_file, pylark.UploadDriveFileReq()
        )

    def test_real_request_prepare_upload_drive_file(self):
        self._assert_no_permission(
            self.module_cli.prepare_upload_drive_file,
            pylark.PrepareUploadDriveFileReq(),
        )

    def test_real_request_part_upload_drive_file(self):
        self._assert_no_permission(
            self.module_cli.part_upload_drive_file,
            pylark.PartUploadDriveFileReq(),
        )

    def test_real_request_finish_upload_drive_file(self):
        self._assert_no_permission(
            self.module_cli.finish_upload_drive_file,
            pylark.FinishUploadDriveFileReq(),
        )

    def test_real_request_download_drive_media(self):
        self._assert_no_permission(
            self.module_cli.download_drive_media,
            pylark.DownloadDriveMediaReq(file_token="x"),
        )

    def test_real_request_upload_drive_media(self):
        self._assert_no_permission(
            self.module_cli.upload_drive_media, pylark.UploadDriveMediaReq()
        )

    def test_real_request_prepare_upload_drive_media(self):
        self._assert_no_permission(
            self.module_cli.prepare_upload_drive_media,
            pylark.PrepareUploadDriveMediaReq(),
        )

    def test_real_request_part_upload_drive_media(self):
        self._assert_no_permission(
            self.module_cli.part_upload_drive_media,
            pylark.PartUploadDriveMediaReq(),
        )

    def test_real_request_finish_upload_drive_media(self):
        self._assert_no_permission(
            self.module_cli.finish_upload_drive_media,
            pylark.FinishUploadDriveMediaReq(),
        )

    def test_real_request_create_drive_member_permission_old(self):
        self._assert_no_permission(
            self.module_cli.create_drive_member_permission_old,
            pylark.CreateDriveMemberPermissionOldReq(),
        )

    def test_real_request_transfer_drive_member_permission(self):
        self._assert_no_permission(
            self.module_cli.transfer_drive_member_permission,
            pylark.TransferDriveMemberPermissionReq(),
        )

    def test_real_request_get_drive_member_permission_list(self):
        self._assert_no_permission(
            self.module_cli.get_drive_member_permission_list,
            pylark.GetDriveMemberPermissionListReq(),
        )

    def test_real_request_create_drive_member_permission(self):
        self._assert_no_permission(
            self.module_cli.create_drive_member_permission,
            pylark.CreateDriveMemberPermissionReq(token="x"),
        )

    def test_real_request_delete_drive_member_permission_old(self):
        self._assert_no_permission(
            self.module_cli.delete_drive_member_permission_old,
            pylark.DeleteDriveMemberPermissionOldReq(),
        )

    def test_real_request_delete_drive_member_permission(self):
        self._assert_no_permission(
            self.module_cli.delete_drive_member_permission,
            pylark.DeleteDriveMemberPermissionReq(token="x", member_id="x"),
        )

    def test_real_request_update_drive_member_permission_old(self):
        self._assert_no_permission(
            self.module_cli.update_drive_member_permission_old,
            pylark.UpdateDriveMemberPermissionOldReq(),
        )

    def test_real_request_update_drive_member_permission(self):
        self._assert_no_permission(
            self.module_cli.update_drive_member_permission,
            pylark.UpdateDriveMemberPermissionReq(token="x", member_id="x"),
        )

    def test_real_request_check_drive_member_permission(self):
        self._assert_no_permission(
            self.module_cli.check_drive_member_permission,
            pylark.CheckDriveMemberPermissionReq(),
        )

    def test_real_request_update_drive_public_permission_v1_old(self):
        self._assert_no_permission(
            self.module_cli.update_drive_public_permission_v1_old,
            pylark.UpdateDrivePublicPermissionV1OldReq(),
        )

    def test_real_request_update_drive_public_permission_v2_old(self):
        self._assert_no_permission(
            self.module_cli.update_drive_public_permission_v2_old,
            pylark.UpdateDrivePublicPermissionV2OldReq(),
        )

    def test_real_request_get_drive_public_permission_v2(self):
        self._assert_no_permission(
            self.module_cli.get_drive_public_permission_v2,
            pylark.GetDrivePublicPermissionV2Req(),
        )

    def test_real_request_update_drive_public_permission(self):
        self._assert_no_permission(
            self.module_cli.update_drive_public_permission,
            pylark.UpdateDrivePublicPermissionReq(token="x"),
        )

    def test_real_request_batch_get_drive_media_tmp_download_url(self):
        self._assert_no_permission(
            self.module_cli.batch_get_drive_media_tmp_download_url,
            pylark.BatchGetDriveMediaTmpDownloadURLReq(),
        )

    def test_real_request_get_drive_comment_list(self):
        self._assert_no_permission(
            self.module_cli.get_drive_comment_list,
            pylark.GetDriveCommentListReq(file_token="x"),
        )

    def test_real_request_get_drive_comment(self):
        self._assert_no_permission(
            self.module_cli.get_drive_comment,
            pylark.GetDriveCommentReq(file_token="x", comment_id="x"),
        )

    def test_real_request_create_drive_comment(self):
        self._assert_no_permission(
            self.module_cli.create_drive_comment,
            pylark.CreateDriveCommentReq(file_token="x"),
        )

    def test_real_request_update_drive_comment(self):
        self._assert_no_permission(
            self.module_cli.update_drive_comment,
            pylark.UpdateDriveCommentReq(
                file_token="x", comment_id="x", reply_id="x"
            ),
        )

    def test_real_request_delete_drive_comment(self):
        self._assert_no_permission(
            self.module_cli.delete_drive_comment,
            pylark.DeleteDriveCommentReq(
                file_token="x", comment_id="x", reply_id="x"
            ),
        )

    def test_real_request_update_drive_comment_patch(self):
        self._assert_no_permission(
            self.module_cli.update_drive_comment_patch,
            pylark.UpdateDriveCommentPatchReq(file_token="x", comment_id="x"),
        )

    def test_real_request_create_drive_doc(self):
        self._assert_no_permission(
            self.module_cli.create_drive_doc, pylark.CreateDriveDocReq()
        )

    def test_real_request_get_drive_doc_content(self):
        self._assert_no_permission(
            self.module_cli.get_drive_doc_content,
            pylark.GetDriveDocContentReq(doc_token="x"),
        )

    def test_real_request_get_drive_doc_raw_content(self):
        self._assert_no_permission(
            self.module_cli.get_drive_doc_raw_content,
            pylark.GetDriveDocRawContentReq(doc_token="x"),
        )

    def test_real_request_get_drive_doc_meta(self):
        self._assert_no_permission(
            self.module_cli.get_drive_doc_meta,
            pylark.GetDriveDocMetaReq(doc_token="x"),
        )

    def test_real_request_create_sheet(self):
        self._assert_no_permission(
            self.module_cli.create_sheet, pylark.CreateSheetReq()
        )

    def test_real_request_get_sheet_meta(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_meta,
            pylark.GetSheetMetaReq(spreadsheet_token="x"),
        )

    def test_real_request_update_sheet_property(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_property,
            pylark.UpdateSheetPropertyReq(spreadsheet_token="x"),
        )

    def test_real_request_batch_update_sheet(self):
        self._assert_no_permission(
            self.module_cli.batch_update_sheet,
            pylark.BatchUpdateSheetReq(spreadsheet_token="x"),
        )

    def test_real_request_import_sheet(self):
        self._assert_no_permission(
            self.module_cli.import_sheet, pylark.ImportSheetReq()
        )

    def test_real_request_create_drive_import_task(self):
        self._assert_no_permission(
            self.module_cli.create_drive_import_task,
            pylark.CreateDriveImportTaskReq(),
        )

    def test_real_request_get_drive_import_task(self):
        self._assert_no_permission(
            self.module_cli.get_drive_import_task,
            pylark.GetDriveImportTaskReq(ticket="x"),
        )

    def test_real_request_move_sheet_dimension(self):
        self._assert_no_permission(
            self.module_cli.move_sheet_dimension,
            pylark.MoveSheetDimensionReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_prepend_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.prepend_sheet_value,
            pylark.PrependSheetValueReq(spreadsheet_token="x"),
        )

    def test_real_request_append_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.append_sheet_value,
            pylark.AppendSheetValueReq(spreadsheet_token="x"),
        )

    def test_real_request_insert_sheet_dimension_range(self):
        self._assert_no_permission(
            self.module_cli.insert_sheet_dimension_range,
            pylark.InsertSheetDimensionRangeReq(spreadsheet_token="x"),
        )

    def test_real_request_add_sheet_dimension_range(self):
        self._assert_no_permission(
            self.module_cli.add_sheet_dimension_range,
            pylark.AddSheetDimensionRangeReq(spreadsheet_token="x"),
        )

    def test_real_request_update_sheet_dimension_range(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_dimension_range,
            pylark.UpdateSheetDimensionRangeReq(spreadsheet_token="x"),
        )

    def test_real_request_delete_sheet_dimension_range(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_dimension_range,
            pylark.DeleteSheetDimensionRangeReq(spreadsheet_token="x"),
        )

    def test_real_request_get_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_value,
            pylark.GetSheetValueReq(spreadsheet_token="x", range_="x"),
        )

    def test_real_request_batch_get_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.batch_get_sheet_value,
            pylark.BatchGetSheetValueReq(spreadsheet_token="x"),
        )

    def test_real_request_set_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.set_sheet_value,
            pylark.SetSheetValueReq(spreadsheet_token="x"),
        )

    def test_real_request_batch_set_sheet_value(self):
        self._assert_no_permission(
            self.module_cli.batch_set_sheet_value,
            pylark.BatchSetSheetValueReq(spreadsheet_token="x"),
        )

    def test_real_request_set_sheet_style(self):
        self._assert_no_permission(
            self.module_cli.set_sheet_style,
            pylark.SetSheetStyleReq(spreadsheet_token="x"),
        )

    def test_real_request_batch_set_sheet_style(self):
        self._assert_no_permission(
            self.module_cli.batch_set_sheet_style,
            pylark.BatchSetSheetStyleReq(spreadsheet_token="x"),
        )

    def test_real_request_merge_sheet_cell(self):
        self._assert_no_permission(
            self.module_cli.merge_sheet_cell,
            pylark.MergeSheetCellReq(spreadsheet_token="x"),
        )

    def test_real_request_unmerge_sheet_cell(self):
        self._assert_no_permission(
            self.module_cli.unmerge_sheet_cell,
            pylark.UnmergeSheetCellReq(spreadsheet_token="x"),
        )

    def test_real_request_set_sheet_value_image(self):
        self._assert_no_permission(
            self.module_cli.set_sheet_value_image,
            pylark.SetSheetValueImageReq(spreadsheet_token="x"),
        )

    def test_real_request_find_sheet(self):
        self._assert_no_permission(
            self.module_cli.find_sheet,
            pylark.FindSheetReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_replace_sheet(self):
        self._assert_no_permission(
            self.module_cli.replace_sheet,
            pylark.ReplaceSheetReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_create_sheet_condition_format(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_condition_format,
            pylark.CreateSheetConditionFormatReq(spreadsheet_token="x"),
        )

    def test_real_request_get_sheet_condition_format(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_condition_format,
            pylark.GetSheetConditionFormatReq(spreadsheet_token="x"),
        )

    def test_real_request_update_sheet_condition_format(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_condition_format,
            pylark.UpdateSheetConditionFormatReq(spreadsheet_token="x"),
        )

    def test_real_request_delete_sheet_condition_format(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_condition_format,
            pylark.DeleteSheetConditionFormatReq(spreadsheet_token="x"),
        )

    def test_real_request_create_sheet_protected_dimension(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_protected_dimension,
            pylark.CreateSheetProtectedDimensionReq(spreadsheet_token="x"),
        )

    def test_real_request_get_sheet_protected_dimension(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_protected_dimension,
            pylark.GetSheetProtectedDimensionReq(spreadsheet_token="x"),
        )

    def test_real_request_update_sheet_protected_dimension(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_protected_dimension,
            pylark.UpdateSheetProtectedDimensionReq(spreadsheet_token="x"),
        )

    def test_real_request_delete_sheet_protected_dimension(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_protected_dimension,
            pylark.DeleteSheetProtectedDimensionReq(spreadsheet_token="x"),
        )

    def test_real_request_create_sheet_data_validation_dropdown(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_data_validation_dropdown,
            pylark.CreateSheetDataValidationDropdownReq(spreadsheet_token="x"),
        )

    def test_real_request_delete_sheet_data_validation_dropdown(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_data_validation_dropdown,
            pylark.DeleteSheetDataValidationDropdownReq(spreadsheet_token="x"),
        )

    def test_real_request_update_sheet_data_validation_dropdown(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_data_validation_dropdown,
            pylark.UpdateSheetDataValidationDropdownReq(
                spreadsheet_token="x",
                sheet_id="x",
                data_validation_id=1,
            ),
        )

    def test_real_request_get_sheet_data_validation_dropdown(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_data_validation_dropdown,
            pylark.GetSheetDataValidationDropdownReq(spreadsheet_token="x"),
        )

    def test_real_request_create_sheet_filter(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_filter,
            pylark.CreateSheetFilterReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_delete_sheet_filter(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_filter,
            pylark.DeleteSheetFilterReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_update_sheet_filter(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_filter,
            pylark.UpdateSheetFilterReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_get_sheet_filter(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_filter,
            pylark.GetSheetFilterReq(spreadsheet_token="x", sheet_id="x"),
        )

    def test_real_request_create_sheet_filter_view(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_filter_view,
            pylark.CreateSheetFilterViewReq(
                spreadsheet_token="x", sheet_id="x"
            ),
        )

    def test_real_request_delete_sheet_filter_view(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_filter_view,
            pylark.DeleteSheetFilterViewReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
            ),
        )

    def test_real_request_update_sheet_filter_view(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_filter_view,
            pylark.UpdateSheetFilterViewReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
            ),
        )

    def test_real_request_get_sheet_filter_view(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_filter_view,
            pylark.GetSheetFilterViewReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
            ),
        )

    def test_real_request_query_sheet_filter_view(self):
        self._assert_no_permission(
            self.module_cli.query_sheet_filter_view,
            pylark.QuerySheetFilterViewReq(
                spreadsheet_token="x", sheet_id="x"
            ),
        )

    def test_real_request_create_sheet_filter_view_condition(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_filter_view_condition,
            pylark.CreateSheetFilterViewConditionReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
            ),
        )

    def test_real_request_delete_sheet_filter_view_condition(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_filter_view_condition,
            pylark.DeleteSheetFilterViewConditionReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
                condition_id="x",
            ),
        )

    def test_real_request_update_sheet_filter_view_condition(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_filter_view_condition,
            pylark.UpdateSheetFilterViewConditionReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
                condition_id="x",
            ),
        )

    def test_real_request_get_sheet_filter_view_condition(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_filter_view_condition,
            pylark.GetSheetFilterViewConditionReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
                condition_id="x",
            ),
        )

    def test_real_request_query_sheet_filter_view_condition(self):
        self._assert_no_permission(
            self.module_cli.query_sheet_filter_view_condition,
            pylark.QuerySheetFilterViewConditionReq(
                spreadsheet_token="x",
                sheet_id="x",
                filter_view_id="x",
            ),
        )

    def test_real_request_create_sheet_float_image(self):
        self._assert_no_permission(
            self.module_cli.create_sheet_float_image,
            pylark.CreateSheetFloatImageReq(
                spreadsheet_token="x", sheet_id="x"
            ),
        )

    def test_real_request_delete_sheet_float_image(self):
        self._assert_no_permission(
            self.module_cli.delete_sheet_float_image,
            pylark.DeleteSheetFloatImageReq(
                spreadsheet_token="x",
                sheet_id="x",
                float_image_id="x",
            ),
        )

    def test_real_request_update_sheet_float_image(self):
        self._assert_no_permission(
            self.module_cli.update_sheet_float_image,
            pylark.UpdateSheetFloatImageReq(
                spreadsheet_token="x",
                sheet_id="x",
                float_image_id="x",
            ),
        )

    def test_real_request_get_sheet_float_image(self):
        self._assert_no_permission(
            self.module_cli.get_sheet_float_image,
            pylark.GetSheetFloatImageReq(
                spreadsheet_token="x",
                sheet_id="x",
                float_image_id="x",
            ),
        )

    def test_real_request_query_sheet_float_image(self):
        self._assert_no_permission(
            self.module_cli.query_sheet_float_image,
            pylark.QuerySheetFloatImageReq(
                spreadsheet_token="x", sheet_id="x"
            ),
        )

    def test_real_request_get_wiki_space_list(self):
        self._assert_no_permission(
            self.module_cli.get_wiki_space_list, pylark.GetWikiSpaceListReq()
        )

    def test_real_request_get_wiki_space(self):
        self._assert_no_permission(
            self.module_cli.get_wiki_space,
            pylark.GetWikiSpaceReq(space_id="x"),
        )

    def test_real_request_update_wiki_space_setting(self):
        self._assert_no_permission(
            self.module_cli.update_wiki_space_setting,
            pylark.UpdateWikiSpaceSettingReq(space_id="x"),
        )

    def test_real_request_add_wiki_space_member(self):
        self._assert_no_permission(
            self.module_cli.add_wiki_space_member,
            pylark.AddWikiSpaceMemberReq(space_id="x"),
        )

    def test_real_request_create_wiki_node(self):
        self._assert_no_permission(
            self.module_cli.create_wiki_node,
            pylark.CreateWikiNodeReq(space_id="x"),
        )

    def test_real_request_get_wiki_node_list(self):
        self._assert_no_permission(
            self.module_cli.get_wiki_node_list,
            pylark.GetWikiNodeListReq(space_id="x"),
        )

    def test_real_request_get_wiki_node(self):
        self._assert_no_permission(
            self.module_cli.get_wiki_node, pylark.GetWikiNodeReq()
        )

    def test_real_request_move_docs_to_wiki(self):
        self._assert_no_permission(
            self.module_cli.move_docs_to_wiki,
            pylark.MoveDocsToWikiReq(space_id="x"),
        )
| 36.80886 | 88 | 0.652749 | 160,613 | 0.996445 | 0 | 0 | 0 | 0 | 0 | 0 | 8,075 | 0.050097 |
a62f20a3565292350f831b63d64b71a76ad1a913 | 354 | py | Python | modules/seloger/constants.py | Phyks/Flatisfy | 9e495bb63ec32686e6dc6be566da9672ad014880 | [
"MIT"
] | 15 | 2017-06-07T07:17:47.000Z | 2021-04-22T21:04:32.000Z | modules/seloger/constants.py | Phyks/Flatisfy | 9e495bb63ec32686e6dc6be566da9672ad014880 | [
"MIT"
] | 15 | 2017-06-13T11:12:02.000Z | 2021-03-27T12:28:42.000Z | modules/seloger/constants.py | Phyks/Flatisfy | 9e495bb63ec32686e6dc6be566da9672ad014880 | [
"MIT"
] | 5 | 2017-09-23T20:13:34.000Z | 2021-01-16T09:17:09.000Z | from woob.capabilities.housing import POSTS_TYPES, HOUSE_TYPES
# Posting-type codes keyed by woob POSTS_TYPES. Note that FURNISHED_RENT
# shares code 1 with RENT. (NOTE(review): the numeric codes appear to be
# backend-specific query parameters — confirm against the consuming module.)
TYPES = {
    POSTS_TYPES.RENT: 1,
    POSTS_TYPES.SALE: 2,
    POSTS_TYPES.FURNISHED_RENT: 1,
    POSTS_TYPES.VIAGER: 5,
}

# Estate-type codes keyed by woob HOUSE_TYPES; values are strings here,
# unlike the integer values used in TYPES above.
RET = {
    HOUSE_TYPES.HOUSE: '2',
    HOUSE_TYPES.APART: '1',
    HOUSE_TYPES.LAND: '4',
    HOUSE_TYPES.PARKING: '3',
    HOUSE_TYPES.OTHER: '10',
}
| 27.230769 | 62 | 0.638418 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 16 | 0.045198 |